lang
stringclasses
2 values
license
stringclasses
13 values
stderr
stringlengths
0
343
commit
stringlengths
40
40
returncode
int64
0
128
repos
stringlengths
6
87.7k
new_contents
stringlengths
0
6.23M
new_file
stringlengths
3
311
old_contents
stringlengths
0
6.23M
message
stringlengths
6
9.1k
old_file
stringlengths
3
311
subject
stringlengths
0
4k
git_diff
stringlengths
0
6.31M
Java
apache-2.0
c37b169e1dd8d4f4f823c65215de087caa4e6798
0
roalva1/opencga,javild/opencga,kalyanreddyemani/opencga,roalva1/opencga,roalva1/opencga,j-coll/opencga,j-coll/opencga,opencb/opencga,j-coll/opencga,roalva1/opencga,kalyanreddyemani/opencga,opencb/opencga,j-coll/opencga,opencb/opencga,kalyanreddyemani/opencga,opencb/opencga,javild/opencga,kalyanreddyemani/opencga,javild/opencga,kalyanreddyemani/opencga,javild/opencga,roalva1/opencga,javild/opencga,roalva1/opencga,roalva1/opencga,opencb/opencga,javild/opencga,roalva1/opencga,opencb/opencga,roalva1/opencga,j-coll/opencga,kalyanreddyemani/opencga,j-coll/opencga
package org.opencb.opencga.storage.variant;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.WebResource;
import org.apache.commons.lang.StringUtils;
import org.opencb.commons.bioformats.feature.Region;
import org.opencb.commons.bioformats.variant.Variant;
import org.opencb.commons.bioformats.variant.json.VariantAnalysisInfo;
import org.opencb.commons.bioformats.variant.json.VariantControl;
import org.opencb.commons.bioformats.variant.json.VariantInfo;
import org.opencb.commons.bioformats.variant.utils.effect.VariantEffect;
import org.opencb.commons.bioformats.variant.utils.stats.VariantStats;
import org.opencb.commons.containers.QueryResult;
import org.opencb.commons.containers.map.ObjectMap;
import org.opencb.commons.containers.map.QueryOptions;
import org.opencb.opencga.lib.auth.SqliteCredentials;
import org.opencb.opencga.lib.common.XObject;
import org.opencb.opencga.storage.indices.SqliteManager;

import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.*;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * SQLite-backed {@link VariantQueryBuilder}: answers variant, stats, effect and
 * histogram queries against a per-study SQLite database whose name is taken
 * from the query options ("db_name") or from the injected credentials.
 *
 * @author Alejandro Aleman Ramos &lt;[email protected]&gt;
 * @author Cristina Yenyxe Gonzalez Garcia &lt;[email protected]&gt;
 */
public class VariantSqliteQueryBuilder implements VariantQueryBuilder {

    // Credentials/manager are only set by the credential-taking constructor;
    // methods that rely on them (e.g. the histogram query) presumably require it.
    private SqliteCredentials sqliteCredentials;
    private SqliteManager sqliteManager;

    /** No-credentials constructor; only option-driven queries are usable. */
    public VariantSqliteQueryBuilder() {
        System.out.println("Variant Query Maker");
    }

    /** Constructor wiring the SQLite credentials and index manager. */
    public VariantSqliteQueryBuilder(SqliteCredentials sqliteCredentials) {
        System.out.println("Variant Query Maker");
        this.sqliteCredentials = sqliteCredentials;
        this.sqliteManager = new SqliteManager();
    }

    @Override
    public QueryResult getAllVariantsByRegion(Region region, String studyName, QueryOptions options) {
        Connection con;
        Statement stmt;
List<VariantInfo> list = new ArrayList<>(100); String dbName = (String) options.get("db_name"); showDb(dbName); try { Class.forName("org.sqlite.JDBC"); con = DriverManager.getConnection("jdbc:sqlite:" + dbName); List<String> whereClauses = new ArrayList<>(10); StringBuilder regionClauses = new StringBuilder(); regionClauses.append("( variant_stats.chromosome='").append(region.getChromosome()).append("' AND "); regionClauses.append("variant_stats.position>=").append(String.valueOf(region.getStart())).append(" AND "); regionClauses.append("variant_stats.position<=").append(String.valueOf(region.getStart())).append(" )"); regionClauses.append(" ) "); whereClauses.add(regionClauses.toString()); String sql = "SELECT count(*) as count FROM sample ;"; stmt = con.createStatement(); ResultSet rs = stmt.executeQuery(sql); int numSamples = 0; while (rs.next()) { numSamples = rs.getInt("count"); } stmt.close(); String innerJoinVariantSQL = " left join variant_info on variant.id_variant=variant_info.id_variant "; String innerJoinEffectSQL = " inner join variant_effect on variant_effect.chromosome=variant.chromosome AND variant_effect.position=variant.position AND variant_effect.reference_allele=variant.ref AND variant_effect.alternative_allele = variant.alt "; sql = "SELECT distinct variant.genes,variant.consequence_types, variant.id_variant, variant_info.key, variant_info.value, sample_info.sample_name, sample_info.allele_1, sample_info.allele_2, variant_stats.chromosome ," + "variant_stats.position , variant_stats.allele_ref , variant_stats.allele_alt , variant_stats.id , variant_stats.maf , variant_stats.mgf, " + "variant_stats.allele_maf , variant_stats.genotype_maf , variant_stats.miss_allele , variant_stats.miss_gt , variant_stats.mendel_err ," + "variant_stats.is_indel , variant_stats.cases_percent_dominant , variant_stats.controls_percent_dominant , variant_stats.cases_percent_recessive , variant_stats.controls_percent_recessive " + //, variant_stats.genotypes " + " 
FROM variant_stats " + "inner join variant on variant_stats.chromosome=variant.chromosome AND variant_stats.position=variant.position AND variant_stats.allele_ref=variant.ref AND variant_stats.allele_alt=variant.alt " + //innerJoinEffectSQL + "inner join sample_info on variant.id_variant=sample_info.id_variant " + innerJoinVariantSQL; if (whereClauses.size() > 0) { StringBuilder where = new StringBuilder(" where "); for (int i = 0; i < whereClauses.size(); i++) { where.append(whereClauses.get(i)); if (i < whereClauses.size() - 1) { where.append(" AND "); } } sql += where.toString() + " ORDER BY variant_stats.chromosome , variant_stats.position , variant_stats.allele_ref , variant_stats.allele_alt ;"; } System.out.println(sql); System.out.println("Start SQL"); long start = System.currentTimeMillis(); stmt = con.createStatement(); rs = stmt.executeQuery(sql); VariantStats vs; VariantInfo vi = null; String chr = ""; int pos = 0; String ref = "", alt = ""; System.out.println("End SQL: " + ((System.currentTimeMillis() - start) / 1000.0) + " s."); System.out.println("Processing"); while (rs.next()) { if (!rs.getString("chromosome").equals(chr) || rs.getInt("position") != pos || !rs.getString("allele_ref").equals(ref) || !rs.getString("allele_alt").equals(alt)) { chr = rs.getString("chromosome"); pos = rs.getInt("position"); ref = rs.getString("allele_ref"); alt = rs.getString("allele_alt"); // if (vi != null && filterGenotypes(vi, numSamples) && filterControls(vi, controlsMAFs)) { if (vi != null) { // Modified by Cristina list.add(vi); } vi = new VariantInfo(chr, pos, ref, alt); vs = new VariantStats(chr, pos, ref, alt, rs.getDouble("maf"), rs.getDouble("mgf"), rs.getString("allele_maf"), rs.getString("genotype_maf"), rs.getInt("miss_allele"), rs.getInt("miss_gt"), rs.getInt("mendel_err"), rs.getInt("is_indel") == 1, rs.getDouble("cases_percent_dominant"), rs.getDouble("controls_percent_dominant"), rs.getDouble("cases_percent_recessive"), 
rs.getDouble("controls_percent_recessive")); vs.setId(rs.getString("id")); // vi.addGenotypes(rs.getString("genotypes")); vi.addStats(vs); vi.addGenes(rs.getString("genes")); vi.addConsequenceTypes(rs.getString("consequence_types")); } if (rs.getString("key") != null && rs.getString("value") != null) { vi.addControl(rs.getString("key"), rs.getString("value")); } String sample = rs.getString("sample_name"); String gt = rs.getInt("allele_1") + "/" + rs.getInt("allele_2"); vi.addSammpleGenotype(sample, gt); // vi.addGeneAndConsequenceType(rs.getString("gene_name"), rs.getString("consequence_type_obo")); } // if (vi != null && filterGenotypes(vi, numSamples) && filterControls(vi, controlsMAFs)) { if (vi != null) { // Modified by Cristina list.add(vi); } stmt.close(); System.out.println("Total: (" + list.size() + ")"); System.out.println("End processing: " + ((System.currentTimeMillis() - start) / 1000.0) + " s."); con.close(); } catch (ClassNotFoundException | SQLException e) { System.err.println("STATS: " + e.getClass().getName() + ": " + e.getMessage()); } // return list; return new QueryResult(); } @Override public List<QueryResult> getAllVariantsByRegionList(List<Region> region, String studyName, QueryOptions options) { return null; // TODO aaleman: Implementation needed } @Override public QueryResult<ObjectMap> getVariantsHistogramByRegion(Region region, String studyName, boolean histogramLogarithm, int histogramMax) { QueryResult<ObjectMap> queryResult = new QueryResult<>(String.format("%s:%d-%d", region.getChromosome(), region.getStart(), region.getEnd())); // TODO Fill metadata List<ObjectMap> data = new ArrayList<>(); long startTime = System.currentTimeMillis(); Path metaDir = getMetaDir(sqliteCredentials.getPath()); String fileName = sqliteCredentials.getPath().getFileName().toString(); try { long startDbTime = System.currentTimeMillis(); sqliteManager.connect(metaDir.resolve(Paths.get(fileName)), true); System.out.println("SQLite path: " + 
metaDir.resolve(Paths.get(fileName)).toString()); String queryString = "SELECT * FROM chunk WHERE chromosome='" + region.getChromosome() + "' AND start <= " + region.getEnd() + " AND end >= " + region.getStart(); List<XObject> queryResults = sqliteManager.query(queryString); sqliteManager.disconnect(true); queryResult.setDbTime(System.currentTimeMillis() - startDbTime); int resultSize = queryResults.size(); if (resultSize > histogramMax) { // Need to group results to fit maximum size of the histogram int sumChunkSize = resultSize / histogramMax; int i = 0, j = 0; int featuresCount = 0; ObjectMap item = null; for (XObject result : queryResults) { featuresCount += result.getInt("features_count"); if (i == 0) { item = new ObjectMap("chromosome", result.getString("chromosome")); item.put("chunkId", result.getInt("chunk_id")); item.put("start", result.getInt("start")); } else if (i == sumChunkSize - 1 || j == resultSize - 1) { if (histogramLogarithm) { item.put("featuresCount", (featuresCount > 0) ? Math.log(featuresCount) : 0); } else { item.put("featuresCount", featuresCount); } item.put("end", result.getInt("end")); data.add(item); i = -1; featuresCount = 0; } j++; i++; } } else { for (XObject result : queryResults) { ObjectMap item = new ObjectMap("chromosome", result.getString("chromosome")); item.put("chunkId", result.getInt("chunk_id")); item.put("start", result.getInt("start")); if (histogramLogarithm) { int features_count = result.getInt("features_count"); result.put("featuresCount", (features_count > 0) ? 
Math.log(features_count) : 0); } else { item.put("featuresCount", result.getInt("features_count")); } item.put("end", result.getInt("end")); data.add(item); } } } catch (ClassNotFoundException | SQLException ex ) { Logger.getLogger(VariantSqliteQueryBuilder.class.getName()).log(Level.SEVERE, null, ex); queryResult.setErrorMsg(ex.getMessage()); } queryResult.setResult(data); queryResult.setNumResults(data.size()); queryResult.setTime(System.currentTimeMillis() - startTime); return queryResult; } @Override public QueryResult getStatsByVariant(Variant variant, QueryOptions options) { return null; // TODO aaleman: Implementation needed } @Override public QueryResult getSimpleStatsByVariant(Variant variant, QueryOptions options) { return null; // TODO aaleman: Implementation needed } @Override public QueryResult getEffectsByVariant(Variant variant, QueryOptions options) { return null; // TODO aaleman: Implementation needed } @Override public List<VariantInfo> getRecords(Map<String, String> options) { Connection con; Statement stmt; List<VariantInfo> list = new ArrayList<>(100); String dbName = options.get("db_name"); showDb(dbName); try { Class.forName("org.sqlite.JDBC"); con = DriverManager.getConnection("jdbc:sqlite:" + dbName); List<String> whereClauses = new ArrayList<>(10); Map<String, List<String>> sampleGenotypes; Map<String, String> controlsMAFs = new LinkedHashMap<>(); sampleGenotypes = processSamplesGT(options); if (options.containsKey("region_list") && !options.get("region_list").equals("")) { StringBuilder regionClauses = new StringBuilder("("); String[] regions = options.get("region_list").split(","); Pattern patternReg = Pattern.compile("(\\w+):(\\d+)-(\\d+)"); Matcher matcherReg, matcherChr; for (int i = 0; i < regions.length; i++) { String region = regions[i]; matcherReg = patternReg.matcher(region); if (matcherReg.find()) { String chr = matcherReg.group(1); int start = Integer.valueOf(matcherReg.group(2)); int end = Integer.valueOf(matcherReg.group(3)); 
regionClauses.append("( variant_stats.chromosome='").append(chr).append("' AND "); regionClauses.append("variant_stats.position>=").append(start).append(" AND "); regionClauses.append("variant_stats.position<=").append(end).append(" )"); if (i < (regions.length - 1)) { regionClauses.append(" OR "); } } else { Pattern patternChr = Pattern.compile("(\\w+)"); matcherChr = patternChr.matcher(region); if (matcherChr.find()) { String chr = matcherChr.group(); regionClauses.append("( variant_stats.chromosome='").append(chr).append("')"); if (i < (regions.length - 1)) { regionClauses.append(" OR "); } } else { System.err.println("ERROR: Region (" + region + ")"); } } } regionClauses.append(" ) "); whereClauses.add(regionClauses.toString()); } if (options.containsKey("chr_pos") && !options.get("chr_pos").equals("")) { whereClauses.add("variant_stats.chromosome='" + options.get("chr_pos") + "'"); if (options.containsKey("start_pos") && !options.get("start_pos").equals("")) { whereClauses.add("variant_stats.position>=" + options.get("start_pos")); } if (options.containsKey("end_pos") && !options.get("end_pos").equals("")) { whereClauses.add("variant_stats.position<=" + options.get("end_pos")); } } if (options.containsKey("mend_error") && !options.get("mend_error").equals("")) { String val = options.get("mend_error"); String opt = options.get("option_mend_error"); whereClauses.add("variant_stats.mendel_err " + opt + " " + val); } if (options.containsKey("is_indel") && options.get("is_indel").equalsIgnoreCase("on")) { whereClauses.add("variant_stats.is_indel=1"); } if (options.containsKey("maf") && !options.get("maf").equals("")) { String val = options.get("maf"); String opt = options.get("option_maf"); whereClauses.add("variant_stats.maf " + opt + " " + val); } if (options.containsKey("mgf") && !options.get("mgf").equals("")) { String val = options.get("mgf"); String opt = options.get("option_mgf"); whereClauses.add("variant_stats.mgf " + opt + " " + val); } if 
(options.containsKey("miss_allele") && !options.get("miss_allele").equals("")) { String val = options.get("miss_allele"); String opt = options.get("option_miss_allele"); whereClauses.add("variant_stats.miss_allele " + opt + " " + val); } if (options.containsKey("miss_gt") && !options.get("miss_gt").equals("")) { String val = options.get("miss_gt"); String opt = options.get("option_miss_gt"); whereClauses.add("variant_stats.miss_gt " + opt + " " + val); } if (options.containsKey("cases_percent_dominant") && !options.get("cases_percent_dominant").equals("")) { String val = options.get("cases_percent_dominant"); String opt = options.get("option_cases_dom"); whereClauses.add("variant_stats.cases_percent_dominant " + opt + " " + val); } if (options.containsKey("controls_percent_dominant") && !options.get("controls_percent_dominant").equals("")) { String val = options.get("controls_percent_dominant"); String opt = options.get("option_controls_dom"); whereClauses.add("variant_stats.controls_percent_dominant " + opt + " " + val); } if (options.containsKey("cases_percent_recessive") && !options.get("cases_percent_recessive").equals("")) { String val = options.get("cases_percent_recessive"); String opt = options.get("option_cases_rec"); whereClauses.add("variant_stats.cases_percent_recessive " + opt + " " + val); } if (options.containsKey("controls_percent_recessive") && !options.get("controls_percent_recessive").equals("")) { String val = options.get("controls_percent_recessive"); String opt = options.get("option_controls_rec"); whereClauses.add("variant_stats.controls_percent_recessive " + opt + " " + val); } if (options.containsKey("biotype") && !options.get("biotype").equals("")) { String[] biotypes = options.get("biotype").split(","); StringBuilder biotypesClauses = new StringBuilder(" ( "); for (int i = 0; i < biotypes.length; i++) { biotypesClauses.append("variant_effect.feature_biotype LIKE '%").append(biotypes[i]).append("%'"); if (i < (biotypes.length - 1)) { 
biotypesClauses.append(" OR "); } } biotypesClauses.append(" ) "); whereClauses.add(biotypesClauses.toString()); } if (options.containsKey("exc_1000g_controls") && options.get("exc_1000g_controls").equalsIgnoreCase("on")) { whereClauses.add("(key NOT LIKE '1000G%' OR key is null)"); } else if (options.containsKey("maf_1000g_controls") && !options.get("maf_1000g_controls").equals("")) { controlsMAFs.put("1000G", options.get("maf_1000g_controls")); } if (options.containsKey("exc_bier_controls") && options.get("exc_bier_controls").equalsIgnoreCase("on")) { whereClauses.add("(key NOT LIKE 'BIER%' OR key is null)"); } else if (options.containsKey("maf_bier_controls") && !options.get("maf_bier_controls").equals("")) { controlsMAFs.put("BIER", options.get("maf_bier_controls")); } if (options.containsKey("exc_evs_controls") && options.get("exc_evs_controls").equalsIgnoreCase("on")) { whereClauses.add("(key NOT LIKE 'EVS%' OR key is null)"); } else if (options.containsKey("maf_evs_controls") && !options.get("maf_evs_controls").equals("")) { controlsMAFs.put("BIER", options.get("maf_evs_controls")); } if (options.containsKey("conseq_type[]") && !options.get("conseq_type[]").equals("")) { whereClauses.add(processConseqType(options.get("conseq_type[]"))); } if (options.containsKey("genes") && !options.get("genes").equals("")) { whereClauses.add(processGeneList(options.get("genes"))); // processGeneList(options.get("genes")); } if (sampleGenotypes.size() > 0) { StringBuilder sg = new StringBuilder(); int csg = 0; sg.append("("); for (Map.Entry<String, List<String>> entry : sampleGenotypes.entrySet()) { sg.append("("); sg.append("sample_name='").append(entry.getKey()).append("' AND ("); for (int i = 0; i < entry.getValue().size(); i++) { String[] aux = entry.getValue().get(i).split("/"); sg.append("("); sg.append("allele_1=").append(aux[0]).append(" AND allele_2=").append(aux[1]); sg.append(")"); if (i + 1 < entry.getValue().size()) { sg.append(" OR "); } } sg.append(")"); 
sg.append(" OR sample_name<>'").append(entry.getKey()).append("'"); sg.append(")"); if (csg + 1 < sampleGenotypes.entrySet().size()) { sg.append(" AND "); } csg++; } sg.append(")"); System.out.println(sg); whereClauses.add(sg.toString()); } String sql = "SELECT count(*) as count FROM sample ;"; stmt = con.createStatement(); ResultSet rs = stmt.executeQuery(sql); int numSamples = 0; while (rs.next()) { numSamples = rs.getInt("count"); } stmt.close(); System.out.println("controlsMAFs = " + controlsMAFs); System.out.println("sampleGenotypes = " + sampleGenotypes); String innerJoinVariantSQL = " left join variant_info on variant.id_variant=variant_info.id_variant "; // String innerJoinEffectSQL = " inner join variant_effect on variant_effect.chromosome=variant.chromosome AND variant_effect.position=variant.position AND variant_effect.reference_allele=variant.ref AND variant_effect.alternative_allele = variant.alt "; sql = "SELECT distinct variant.genes,variant.consequence_types, variant.id_variant, variant_info.key, variant_info.value, sample_info.sample_name, sample_info.allele_1, sample_info.allele_2, variant_stats.chromosome ," + "variant_stats.position , variant_stats.allele_ref , variant_stats.allele_alt , variant_stats.id , variant_stats.maf , variant_stats.mgf, " + "variant_stats.allele_maf , variant_stats.genotype_maf , variant_stats.miss_allele , variant_stats.miss_gt , variant_stats.mendel_err ," + "variant_stats.is_indel , variant_stats.cases_percent_dominant , variant_stats.controls_percent_dominant , variant_stats.cases_percent_recessive , variant_stats.controls_percent_recessive, " + "variant.polyphen_score, variant.polyphen_effect, variant.sift_score, variant.sift_effect " + " FROM variant_stats " + "inner join variant on variant_stats.chromosome=variant.chromosome AND variant_stats.position=variant.position AND variant_stats.allele_ref=variant.ref AND variant_stats.allele_alt=variant.alt " + "inner join sample_info on 
variant.id_variant=sample_info.id_variant " + innerJoinVariantSQL; if (whereClauses.size() > 0) { StringBuilder where = new StringBuilder(" where "); for (int i = 0; i < whereClauses.size(); i++) { where.append(whereClauses.get(i)); if (i < whereClauses.size() - 1) { where.append(" AND "); } } sql += where.toString() + " ORDER BY variant_stats.chromosome , variant_stats.position , variant_stats.allele_ref , variant_stats.allele_alt ;"; } System.out.println(sql); System.out.println("Start SQL"); long start = System.currentTimeMillis(); stmt = con.createStatement(); rs = stmt.executeQuery(sql); VariantStats vs; VariantInfo vi = null; String chr = ""; int pos = 0; String ref = "", alt = ""; System.out.println("End SQL: " + ((System.currentTimeMillis() - start) / 1000.0) + " s."); System.out.println("Processing"); while (rs.next()) { if (!rs.getString("chromosome").equals(chr) || rs.getInt("position") != pos || !rs.getString("allele_ref").equals(ref) || !rs.getString("allele_alt").equals(alt)) { chr = rs.getString("chromosome"); pos = rs.getInt("position"); ref = rs.getString("allele_ref"); alt = rs.getString("allele_alt"); if (vi != null && filterGenotypes(vi, numSamples) && filterControls(vi, controlsMAFs)) { list.add(vi); } vi = new VariantInfo(chr, pos, ref, alt); vs = new VariantStats(chr, pos, ref, alt, rs.getDouble("maf"), rs.getDouble("mgf"), rs.getString("allele_maf"), rs.getString("genotype_maf"), rs.getInt("miss_allele"), rs.getInt("miss_gt"), rs.getInt("mendel_err"), rs.getInt("is_indel") == 1, rs.getDouble("cases_percent_dominant"), rs.getDouble("controls_percent_dominant"), rs.getDouble("cases_percent_recessive"), rs.getDouble("controls_percent_recessive")); vs.setId(rs.getString("id")); // vi.addGenotypes(rs.getString("genotypes")); vi.addStats(vs); vi.addGenes(rs.getString("genes")); vi.addConsequenceTypes(rs.getString("consequence_types")); vi.setPolyphen_score(rs.getDouble("polyphen_score")); vi.setSift_score(rs.getDouble("sift_score")); 
vi.setPolyphen_effect(rs.getInt("polyphen_effect")); vi.setSift_effect(rs.getInt("sift_effect")); } if (rs.getString("key") != null && rs.getString("value") != null) { vi.addControl(rs.getString("key"), rs.getString("value")); } String sample = rs.getString("sample_name"); String gt = rs.getInt("allele_1") + "/" + rs.getInt("allele_2"); vi.addSammpleGenotype(sample, gt); // vi.addGeneAndConsequenceType(rs.getString("gene_name"), rs.getString("consequence_type_obo")); } if (vi != null && filterGenotypes(vi, numSamples) && filterControls(vi, controlsMAFs)) { list.add(vi); } stmt.close(); System.out.println("Total: (" + list.size() + ")"); System.out.println("End processing: " + ((System.currentTimeMillis() - start) / 1000.0) + " s."); con.close(); } catch (ClassNotFoundException | SQLException e) { System.err.println("STATS: " + e.getClass().getName() + ": " + e.getMessage()); } return list; } private void showDb(String dbName) { System.out.println("DB: " + dbName); } @Override public List<VariantStats> getRecordsStats(Map<String, String> options) { Connection con; Statement stmt; List<VariantStats> list = new ArrayList<>(100); String dbName = options.get("db_name"); try { Class.forName("org.sqlite.JDBC"); con = DriverManager.getConnection("jdbc:sqlite:" + dbName); List<String> whereClauses = new ArrayList<>(10); if (options.containsKey("region_list") && !options.get("region_list").equals("")) { StringBuilder regionClauses = new StringBuilder("("); String[] regions = options.get("region_list").split(","); Pattern pattern = Pattern.compile("(\\w+):(\\d+)-(\\d+)"); Matcher matcher; for (int i = 0; i < regions.length; i++) { String region = regions[i]; matcher = pattern.matcher(region); if (matcher.find()) { String chr = matcher.group(1); int start = Integer.valueOf(matcher.group(2)); int end = Integer.valueOf(matcher.group(3)); regionClauses.append("( variant_stats.chromosome='").append(chr).append("' AND "); 
regionClauses.append("variant_stats.position>=").append(start).append(" AND "); regionClauses.append("variant_stats.position<=").append(end).append(" )"); if (i < (regions.length - 1)) { regionClauses.append(" OR "); } } } regionClauses.append(" ) "); whereClauses.add(regionClauses.toString()); } if (options.containsKey("mend_error") && !options.get("mend_error").equals("")) { String val = options.get("mend_error"); String opt = options.get("option_mend_error"); whereClauses.add("variant_stats.mendel_err " + opt + " " + val); } if (options.containsKey("is_indel") && options.get("is_indel").equalsIgnoreCase("on")) { whereClauses.add("variant_stats.is_indel=1"); } if (options.containsKey("maf") && !options.get("maf").equals("")) { String val = options.get("maf"); String opt = options.get("option_maf"); whereClauses.add("variant_stats.maf " + opt + " " + val); } if (options.containsKey("mgf") && !options.get("mgf").equals("")) { String val = options.get("mgf"); String opt = options.get("option_mgf"); whereClauses.add("variant_stats.mgf " + opt + " " + val); } if (options.containsKey("miss_allele") && !options.get("miss_allele").equals("")) { String val = options.get("miss_allele"); String opt = options.get("option_miss_allele"); whereClauses.add("variant_stats.miss_allele " + opt + " " + val); } if (options.containsKey("miss_gt") && !options.get("miss_gt").equals("")) { String val = options.get("miss_gt"); String opt = options.get("option_miss_gt"); whereClauses.add("variant_stats.miss_gt " + opt + " " + val); } if (options.containsKey("cases_percent_dominant") && !options.get("cases_percent_dominant").equals("")) { String val = options.get("cases_percent_dominant"); String opt = options.get("option_cases_dom"); whereClauses.add("variant_stats.cases_percent_dominant " + opt + " " + val); } if (options.containsKey("controls_percent_dominant") && !options.get("controls_percent_dominant").equals("")) { String val = options.get("controls_percent_dominant"); String opt = 
options.get("option_controls_dom"); whereClauses.add("variant_stats.controls_percent_dominant " + opt + " " + val); } if (options.containsKey("cases_percent_recessive") && !options.get("cases_percent_recessive").equals("")) { String val = options.get("cases_percent_recessive"); String opt = options.get("option_cases_rec"); whereClauses.add("variant_stats.cases_percent_recessive " + opt + " " + val); } if (options.containsKey("controls_percent_recessive") && !options.get("controls_percent_recessive").equals("")) { String val = options.get("controls_percent_recessive"); String opt = options.get("option_controls_rec"); whereClauses.add("variant_stats.controls_percent_recessive " + opt + " " + val); } if (options.containsKey("genes") && !options.get("genes").equals("")) { whereClauses.add(processGeneList(options.get("genes"))); } String sql = "SELECT distinct variant_stats.chromosome ," + "variant_stats.position , variant_stats.allele_ref , variant_stats.allele_alt, variant_stats.maf , variant_stats.mgf, " + "variant_stats.allele_maf , variant_stats.genotype_maf , variant_stats.miss_allele , variant_stats.miss_gt , variant_stats.mendel_err ," + "variant_stats.is_indel , variant_stats.cases_percent_dominant , variant_stats.controls_percent_dominant , variant_stats.cases_percent_recessive , variant_stats.controls_percent_recessive" + " FROM variant_stats "; if (whereClauses.size() > 0) { StringBuilder where = new StringBuilder(" where "); for (int i = 0; i < whereClauses.size(); i++) { where.append(whereClauses.get(i)); if (i < whereClauses.size() - 1) { where.append(" AND "); } } sql += where.toString() + " ORDER BY variant_stats.chromosome , variant_stats.position , variant_stats.allele_ref ;"; } System.out.println(sql); System.out.println("Start SQL"); long start = System.currentTimeMillis(); stmt = con.createStatement(); stmt = con.createStatement(); ResultSet rs = stmt.executeQuery(sql); VariantStats vs; VariantInfo vi = null; String chr = ""; int pos = 0; String 
ref = "", alt = ""; System.out.println("End SQL: " + ((System.currentTimeMillis() - start) / 1000.0) + " s."); System.out.println("Processing"); while (rs.next()) { chr = rs.getString("chromosome"); pos = rs.getInt("position"); ref = rs.getString("allele_ref"); alt = rs.getString("allele_alt"); vs = new VariantStats(chr, pos, ref, alt, rs.getDouble("maf"), rs.getDouble("mgf"), rs.getString("allele_maf"), rs.getString("genotype_maf"), rs.getInt("miss_allele"), rs.getInt("miss_gt"), rs.getInt("mendel_err"), rs.getInt("is_indel") == 1, rs.getDouble("cases_percent_dominant"), rs.getDouble("controls_percent_dominant"), rs.getDouble("cases_percent_recessive"), rs.getDouble("controls_percent_recessive")); list.add(vs); } System.out.println("Total: (" + list.size() + ")"); System.out.println("End processing: " + ((System.currentTimeMillis() - start) / 1000.0) + " s."); stmt.close(); con.close(); } catch (ClassNotFoundException | SQLException e) { System.err.println("STATS: " + e.getClass().getName() + ": " + e.getMessage()); } return list; } @Override public List<VariantEffect> getEffect(Map<String, String> options) { Statement stmt; Connection con; List<VariantEffect> list = new ArrayList<>(100); String dbName = options.get("db_name"); try { Class.forName("org.sqlite.JDBC"); con = DriverManager.getConnection("jdbc:sqlite:" + dbName); String chr = options.get("chr"); int pos = Integer.valueOf(options.get("pos")); String ref = options.get("ref"); String alt = options.get("alt"); String sql = "SELECT * FROM variant_effect WHERE chromosome='" + chr + "' AND position=" + pos + " AND reference_allele='" + ref + "' AND alternative_allele='" + alt + "';"; System.out.println(sql); stmt = con.createStatement(); ResultSet rs = stmt.executeQuery(sql); VariantEffect ve; while (rs.next()) { ve = new VariantEffect(rs.getString("chromosome"), rs.getInt("position"), rs.getString("reference_allele"), rs.getString("alternative_allele"), rs.getString("feature_id"), 
rs.getString("feature_name"), rs.getString("feature_type"), rs.getString("feature_biotype"), rs.getString("feature_chromosome"), rs.getInt("feature_start"), rs.getInt("feature_end"), rs.getString("feature_strand"), rs.getString("snp_id"), rs.getString("ancestral"), rs.getString("alternative"), rs.getString("gene_id"), rs.getString("transcript_id"), rs.getString("gene_name"), rs.getString("consequence_type"), rs.getString("consequence_type_obo"), rs.getString("consequence_type_desc"), rs.getString("consequence_type_type"), rs.getInt("aa_position"), rs.getString("aminoacid_change"), rs.getString("codon_change")); ve.setPolyphenEffect(rs.getInt("polyphen_effect")); ve.setSiftEffect(rs.getInt("sift_effect")); ve.setPolyphenScore(rs.getDouble("polyphen_score")); ve.setSiftScore(rs.getDouble("sift_score")); list.add(ve); } stmt.close(); con.close(); } catch (ClassNotFoundException | SQLException e) { System.err.println("EFFECT: " + e.getClass().getName() + ": " + e.getMessage()); } return list; } @Override public VariantAnalysisInfo getAnalysisInfo(Map<String, String> options) { Statement stmt; Connection con; VariantAnalysisInfo vi = new VariantAnalysisInfo(); String dbName = options.get("db_name"); showDb(dbName); try { Class.forName("org.sqlite.JDBC"); con = DriverManager.getConnection("jdbc:sqlite:" + dbName); String sql = "SELECT * FROM sample ;"; stmt = con.createStatement(); ResultSet rs = stmt.executeQuery(sql); while (rs.next()) { vi.addSample(rs.getString("name")); } stmt.close(); sql = "select * from consequence_type_count"; stmt = con.createStatement(); rs = stmt.executeQuery(sql); while (rs.next()) { vi.addConsequenceType(rs.getString("consequence_type_obo"), rs.getInt("count")); } stmt.close(); sql = "select * from biotype_count;"; stmt = con.createStatement(); rs = stmt.executeQuery(sql); while (rs.next()) { vi.addBiotype(rs.getString("feature_biotype"), rs.getInt("count")); } stmt.close(); sql = "select * from global_stats"; stmt = con.createStatement(); 
rs = stmt.executeQuery(sql); while (rs.next()) { vi.addGlobalStats(rs.getString("name").toLowerCase(), rs.getDouble("value")); } stmt.close(); sql = "select count(*) as count, chromosome from variant group by chromosome"; stmt = con.createStatement(); rs = stmt.executeQuery(sql); while (rs.next()) { vi.addChromosome(rs.getString("chromosome"), rs.getInt("count")); } stmt.close(); con.close(); } catch (ClassNotFoundException | SQLException e) { System.err.println("ANALYSIS INFO: " + e.getClass().getName() + ": " + e.getMessage()); } return vi; } private String processGeneList(String genes) { System.out.println("genes = " + genes); List<String> list = new ArrayList<>(); // Client client = ClientBuilder.newClient(); // WebTarget webTarget = client.target("http://ws.bioinfo.cipf.es/cellbase/rest/latest/hsa/feature/gene/"); Client client = Client.create(); WebResource webResource = client.resource("http://ws.bioinfo.cipf.es/cellbase/rest/latest/hsa/feature/gene/"); ObjectMapper mapper = new ObjectMapper(); // Response response = webTarget.path(genes).path("info").queryParam("of", "json").request().get(); String response = webResource.path(genes).path("info").queryParam("of", "json").get(String.class); String data = response.toString(); System.out.println("response = " + response); try { JsonNode actualObj = mapper.readTree(data); Iterator<JsonNode> it = actualObj.iterator(); Iterator<JsonNode> aux; StringBuilder sb; while (it.hasNext()) { JsonNode node = it.next(); if (node.isArray()) { aux = node.iterator(); while (aux.hasNext()) { JsonNode auxNode = aux.next(); sb = new StringBuilder("("); System.out.println("auxNode.get(\"chromosome\").asText() = " + auxNode.get("chromosome").asText()); sb.append("variant_stats.chromosome='").append(auxNode.get("chromosome").asText()).append("' AND "); sb.append("variant_stats.position>=").append(auxNode.get("start")).append(" AND "); sb.append("variant_stats.position<=").append(auxNode.get("end")).append(" )"); 
list.add(sb.toString()); } } } } catch (IOException e) { e.printStackTrace(); } String res = "(" + StringUtils.join(list, " OR ") + ")"; return res; } private boolean filterControls(VariantInfo vi, Map<String, String> controlsMAFs) { boolean res = true; String key; VariantControl vc; float controlMAF; for (Map.Entry<String, VariantControl> entry : vi.getControls().entrySet()) { key = entry.getKey(); vc = entry.getValue(); if (controlsMAFs.containsKey(key)) { controlMAF = Float.valueOf(controlsMAFs.get(key)); if (vc.getMaf() > controlMAF) { return false; } } } return res; } private String processConseqType(String conseqType) { List<String> clauses = new ArrayList<>(10); String[] cts = conseqType.split(","); for (String ct : cts) { clauses.add("(variant.consequence_types LIKE '%" + ct + "%' )"); } String res = ""; if (clauses.size() > 0) { res = "(" + StringUtils.join(clauses, " OR ") + ")"; } return res; } private boolean filterGenotypes(VariantInfo variantInfo, int numSamples) { // if (variantInfo.getSampleGenotypes().size() != numSamples) { // return false; // } else { // return true; // } return variantInfo.getSampleGenotypes().size() == numSamples; } private Map<String, List<String>> processSamplesGT(Map<String, String> options) { Map<String, List<String>> samplesGenotypes = new LinkedHashMap<>(10); List<String> genotypesList; String key, val; for (Map.Entry<String, String> entry : options.entrySet()) { key = entry.getKey(); val = entry.getValue(); if (key.startsWith("sampleGT_")) { String sampleName = key.replace("sampleGT_", "").replace("[]", ""); String[] genotypes = val.split(","); if (samplesGenotypes.containsKey(sampleName)) { genotypesList = samplesGenotypes.get(sampleName); } else { genotypesList = new ArrayList<>(); samplesGenotypes.put(sampleName, genotypesList); } for (int i = 0; i < genotypes.length; i++) { genotypesList.add(genotypes[i]); } } } return samplesGenotypes; } private void processSamplesGT(Map<String, String> options, List<String> 
whereClauses) { String key, val; List<String> auxClauses = new ArrayList<>(); for (Map.Entry<String, String> entry : options.entrySet()) { key = entry.getKey(); val = entry.getValue(); if (key.startsWith("sampleGT_")) { String sampleName = key.replace("sampleGT_", "").replace("[]", ""); String[] genotypes = val.split(","); StringBuilder sb = new StringBuilder("("); for (int i = 0; i < genotypes.length; i++) { String[] gt = genotypes[i].split("_"); sb.append("("); sb.append("sample_info.sample_name='" + sampleName + "'"); sb.append(" AND sample_info.allele_1=" + gt[0]); sb.append(" AND sample_info.allele_2=" + gt[1]); sb.append(")"); if (i < genotypes.length - 1) { sb.append(" OR "); } } sb.append(")"); auxClauses.add(sb.toString()); } } if (auxClauses.size() > 0) { String finalSampleWhere = StringUtils.join(auxClauses, " AND "); whereClauses.add(finalSampleWhere); } } /* ****************************************** * Path and index checking * * ******************************************/ private Path getMetaDir(Path file) { String inputName = file.getFileName().toString(); return file.getParent().resolve(".meta_" + inputName); } }
opencga-storage/src/main/java/org/opencb/opencga/storage/variant/VariantSqliteQueryBuilder.java
package org.opencb.opencga.storage.variant; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.WebResource; import org.apache.commons.lang.StringUtils; import org.opencb.commons.bioformats.feature.Region; import org.opencb.commons.bioformats.variant.Variant; import org.opencb.commons.bioformats.variant.json.VariantAnalysisInfo; import org.opencb.commons.bioformats.variant.json.VariantControl; import org.opencb.commons.bioformats.variant.json.VariantInfo; import org.opencb.commons.bioformats.variant.utils.effect.VariantEffect; import org.opencb.commons.bioformats.variant.utils.stats.VariantStats; import org.opencb.commons.containers.QueryResult; import org.opencb.commons.containers.map.ObjectMap; import org.opencb.commons.containers.map.QueryOptions; import org.opencb.opencga.lib.auth.SqliteCredentials; import org.opencb.opencga.lib.common.XObject; import org.opencb.opencga.storage.indices.SqliteManager; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.sql.*; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * @author Alejandro Aleman Ramos <[email protected]> * @author Cristina Yenyxe Gonzalez Garcia <[email protected]> */ public class VariantSqliteQueryBuilder implements VariantQueryBuilder { private SqliteCredentials sqliteCredentials; private SqliteManager sqliteManager; public VariantSqliteQueryBuilder() { System.out.println("Variant Query Maker"); } public VariantSqliteQueryBuilder(SqliteCredentials sqliteCredentials) { System.out.println("Variant Query Maker"); this.sqliteCredentials = sqliteCredentials; this.sqliteManager = new SqliteManager(); } @Override public QueryResult getAllVariantsByRegion(Region region, String studyName, QueryOptions options) { Connection con; Statement stmt; 
List<VariantInfo> list = new ArrayList<>(100); String dbName = (String) options.get("db_name"); showDb(dbName); try { Class.forName("org.sqlite.JDBC"); con = DriverManager.getConnection("jdbc:sqlite:" + dbName); List<String> whereClauses = new ArrayList<>(10); StringBuilder regionClauses = new StringBuilder(); regionClauses.append("( variant_stats.chromosome='").append(region.getChromosome()).append("' AND "); regionClauses.append("variant_stats.position>=").append(String.valueOf(region.getStart())).append(" AND "); regionClauses.append("variant_stats.position<=").append(String.valueOf(region.getStart())).append(" )"); regionClauses.append(" ) "); whereClauses.add(regionClauses.toString()); String sql = "SELECT count(*) as count FROM sample ;"; stmt = con.createStatement(); ResultSet rs = stmt.executeQuery(sql); int numSamples = 0; while (rs.next()) { numSamples = rs.getInt("count"); } stmt.close(); String innerJoinVariantSQL = " left join variant_info on variant.id_variant=variant_info.id_variant "; String innerJoinEffectSQL = " inner join variant_effect on variant_effect.chromosome=variant.chromosome AND variant_effect.position=variant.position AND variant_effect.reference_allele=variant.ref AND variant_effect.alternative_allele = variant.alt "; sql = "SELECT distinct variant.genes,variant.consequence_types, variant.id_variant, variant_info.key, variant_info.value, sample_info.sample_name, sample_info.allele_1, sample_info.allele_2, variant_stats.chromosome ," + "variant_stats.position , variant_stats.allele_ref , variant_stats.allele_alt , variant_stats.id , variant_stats.maf , variant_stats.mgf, " + "variant_stats.allele_maf , variant_stats.genotype_maf , variant_stats.miss_allele , variant_stats.miss_gt , variant_stats.mendel_err ," + "variant_stats.is_indel , variant_stats.cases_percent_dominant , variant_stats.controls_percent_dominant , variant_stats.cases_percent_recessive , variant_stats.controls_percent_recessive " + //, variant_stats.genotypes " + " 
FROM variant_stats " + "inner join variant on variant_stats.chromosome=variant.chromosome AND variant_stats.position=variant.position AND variant_stats.allele_ref=variant.ref AND variant_stats.allele_alt=variant.alt " + //innerJoinEffectSQL + "inner join sample_info on variant.id_variant=sample_info.id_variant " + innerJoinVariantSQL; if (whereClauses.size() > 0) { StringBuilder where = new StringBuilder(" where "); for (int i = 0; i < whereClauses.size(); i++) { where.append(whereClauses.get(i)); if (i < whereClauses.size() - 1) { where.append(" AND "); } } sql += where.toString() + " ORDER BY variant_stats.chromosome , variant_stats.position , variant_stats.allele_ref , variant_stats.allele_alt ;"; } System.out.println(sql); System.out.println("Start SQL"); long start = System.currentTimeMillis(); stmt = con.createStatement(); rs = stmt.executeQuery(sql); VariantStats vs; VariantInfo vi = null; String chr = ""; int pos = 0; String ref = "", alt = ""; System.out.println("End SQL: " + ((System.currentTimeMillis() - start) / 1000.0) + " s."); System.out.println("Processing"); while (rs.next()) { if (!rs.getString("chromosome").equals(chr) || rs.getInt("position") != pos || !rs.getString("allele_ref").equals(ref) || !rs.getString("allele_alt").equals(alt)) { chr = rs.getString("chromosome"); pos = rs.getInt("position"); ref = rs.getString("allele_ref"); alt = rs.getString("allele_alt"); // if (vi != null && filterGenotypes(vi, numSamples) && filterControls(vi, controlsMAFs)) { if (vi != null) { // Modified by Cristina list.add(vi); } vi = new VariantInfo(chr, pos, ref, alt); vs = new VariantStats(chr, pos, ref, alt, rs.getDouble("maf"), rs.getDouble("mgf"), rs.getString("allele_maf"), rs.getString("genotype_maf"), rs.getInt("miss_allele"), rs.getInt("miss_gt"), rs.getInt("mendel_err"), rs.getInt("is_indel") == 1, rs.getDouble("cases_percent_dominant"), rs.getDouble("controls_percent_dominant"), rs.getDouble("cases_percent_recessive"), 
rs.getDouble("controls_percent_recessive")); vs.setId(rs.getString("id")); // vi.addGenotypes(rs.getString("genotypes")); vi.addStats(vs); vi.addGenes(rs.getString("genes")); vi.addConsequenceTypes(rs.getString("consequence_types")); } if (rs.getString("key") != null && rs.getString("value") != null) { vi.addControl(rs.getString("key"), rs.getString("value")); } String sample = rs.getString("sample_name"); String gt = rs.getInt("allele_1") + "/" + rs.getInt("allele_2"); vi.addSammpleGenotype(sample, gt); // vi.addGeneAndConsequenceType(rs.getString("gene_name"), rs.getString("consequence_type_obo")); } // if (vi != null && filterGenotypes(vi, numSamples) && filterControls(vi, controlsMAFs)) { if (vi != null) { // Modified by Cristina list.add(vi); } stmt.close(); System.out.println("Total: (" + list.size() + ")"); System.out.println("End processing: " + ((System.currentTimeMillis() - start) / 1000.0) + " s."); con.close(); } catch (ClassNotFoundException | SQLException e) { System.err.println("STATS: " + e.getClass().getName() + ": " + e.getMessage()); } // return list; return new QueryResult(); } @Override public List<QueryResult> getAllVariantsByRegionList(List<Region> region, String studyName, QueryOptions options) { return null; // TODO Implementation needed } @Override public QueryResult<ObjectMap> getVariantsHistogramByRegion(Region region, String studyName, boolean histogramLogarithm, int histogramMax) { QueryResult<ObjectMap> queryResult = new QueryResult<>(String.format("%s:%d-%d", region.getChromosome(), region.getStart(), region.getEnd())); // TODO Fill metadata List<ObjectMap> data = new ArrayList<>(); long startTime = System.currentTimeMillis(); Path metaDir = getMetaDir(sqliteCredentials.getPath()); String fileName = sqliteCredentials.getPath().getFileName().toString(); try { long startDbTime = System.currentTimeMillis(); sqliteManager.connect(metaDir.resolve(Paths.get(fileName)), true); System.out.println("SQLite path: " + 
metaDir.resolve(Paths.get(fileName)).toString()); String queryString = "SELECT * FROM chunk WHERE chromosome='" + region.getChromosome() + "' AND start <= " + region.getEnd() + " AND end >= " + region.getStart(); List<XObject> queryResults = sqliteManager.query(queryString); sqliteManager.disconnect(true); queryResult.setDbTime(System.currentTimeMillis() - startDbTime); int resultSize = queryResults.size(); if (resultSize > histogramMax) { // Need to group results to fit maximum size of the histogram int sumChunkSize = resultSize / histogramMax; int i = 0, j = 0; int featuresCount = 0; ObjectMap item = null; for (XObject result : queryResults) { featuresCount += result.getInt("features_count"); if (i == 0) { item = new ObjectMap("chromosome", result.getString("chromosome")); item.put("chunkId", result.getInt("chunk_id")); item.put("start", result.getInt("start")); } else if (i == sumChunkSize - 1 || j == resultSize - 1) { if (histogramLogarithm) { item.put("featuresCount", (featuresCount > 0) ? Math.log(featuresCount) : 0); } else { item.put("featuresCount", featuresCount); } item.put("end", result.getInt("end")); data.add(item); i = -1; featuresCount = 0; } j++; i++; } } else { for (XObject result : queryResults) { ObjectMap item = new ObjectMap("chromosome", result.getString("chromosome")); item.put("chunkId", result.getInt("chunk_id")); item.put("start", result.getInt("start")); if (histogramLogarithm) { int features_count = result.getInt("features_count"); result.put("featuresCount", (features_count > 0) ? 
Math.log(features_count) : 0); } else { item.put("featuresCount", result.getInt("features_count")); } item.put("end", result.getInt("end")); data.add(item); } } } catch (ClassNotFoundException | SQLException ex ) { Logger.getLogger(VariantSqliteQueryBuilder.class.getName()).log(Level.SEVERE, null, ex); queryResult.setErrorMsg(ex.getMessage()); } queryResult.setResult(data); queryResult.setNumResults(data.size()); queryResult.setTime(System.currentTimeMillis() - startTime); return queryResult; } @Override public QueryResult getStatsByVariant(Variant variant, QueryOptions options) { return null; // TODO Implementation needed } @Override public QueryResult getSimpleStatsByVariant(Variant variant, QueryOptions options) { return null; // TODO Implementation needed } @Override public QueryResult getEffectsByVariant(Variant variant, QueryOptions options) { return null; //To change body of implemented methods use File | Settings | File Templates. } @Override public List<VariantInfo> getRecords(Map<String, String> options) { Connection con; Statement stmt; List<VariantInfo> list = new ArrayList<>(100); String dbName = options.get("db_name"); showDb(dbName); try { Class.forName("org.sqlite.JDBC"); con = DriverManager.getConnection("jdbc:sqlite:" + dbName); List<String> whereClauses = new ArrayList<>(10); Map<String, List<String>> sampleGenotypes; Map<String, String> controlsMAFs = new LinkedHashMap<>(); sampleGenotypes = processSamplesGT(options); if (options.containsKey("region_list") && !options.get("region_list").equals("")) { StringBuilder regionClauses = new StringBuilder("("); String[] regions = options.get("region_list").split(","); Pattern patternReg = Pattern.compile("(\\w+):(\\d+)-(\\d+)"); Matcher matcherReg, matcherChr; for (int i = 0; i < regions.length; i++) { String region = regions[i]; matcherReg = patternReg.matcher(region); if (matcherReg.find()) { String chr = matcherReg.group(1); int start = Integer.valueOf(matcherReg.group(2)); int end = 
Integer.valueOf(matcherReg.group(3)); regionClauses.append("( variant_stats.chromosome='").append(chr).append("' AND "); regionClauses.append("variant_stats.position>=").append(start).append(" AND "); regionClauses.append("variant_stats.position<=").append(end).append(" )"); if (i < (regions.length - 1)) { regionClauses.append(" OR "); } } else { Pattern patternChr = Pattern.compile("(\\w+)"); matcherChr = patternChr.matcher(region); if (matcherChr.find()) { String chr = matcherChr.group(); regionClauses.append("( variant_stats.chromosome='").append(chr).append("')"); if (i < (regions.length - 1)) { regionClauses.append(" OR "); } } else { System.err.println("ERROR: Region (" + region + ")"); } } } regionClauses.append(" ) "); whereClauses.add(regionClauses.toString()); } if (options.containsKey("chr_pos") && !options.get("chr_pos").equals("")) { whereClauses.add("variant_stats.chromosome='" + options.get("chr_pos") + "'"); if (options.containsKey("start_pos") && !options.get("start_pos").equals("")) { whereClauses.add("variant_stats.position>=" + options.get("start_pos")); } if (options.containsKey("end_pos") && !options.get("end_pos").equals("")) { whereClauses.add("variant_stats.position<=" + options.get("end_pos")); } } if (options.containsKey("mend_error") && !options.get("mend_error").equals("")) { String val = options.get("mend_error"); String opt = options.get("option_mend_error"); whereClauses.add("variant_stats.mendel_err " + opt + " " + val); } if (options.containsKey("is_indel") && options.get("is_indel").equalsIgnoreCase("on")) { whereClauses.add("variant_stats.is_indel=1"); } if (options.containsKey("maf") && !options.get("maf").equals("")) { String val = options.get("maf"); String opt = options.get("option_maf"); whereClauses.add("variant_stats.maf " + opt + " " + val); } if (options.containsKey("mgf") && !options.get("mgf").equals("")) { String val = options.get("mgf"); String opt = options.get("option_mgf"); whereClauses.add("variant_stats.mgf " + 
opt + " " + val); } if (options.containsKey("miss_allele") && !options.get("miss_allele").equals("")) { String val = options.get("miss_allele"); String opt = options.get("option_miss_allele"); whereClauses.add("variant_stats.miss_allele " + opt + " " + val); } if (options.containsKey("miss_gt") && !options.get("miss_gt").equals("")) { String val = options.get("miss_gt"); String opt = options.get("option_miss_gt"); whereClauses.add("variant_stats.miss_gt " + opt + " " + val); } if (options.containsKey("cases_percent_dominant") && !options.get("cases_percent_dominant").equals("")) { String val = options.get("cases_percent_dominant"); String opt = options.get("option_cases_dom"); whereClauses.add("variant_stats.cases_percent_dominant " + opt + " " + val); } if (options.containsKey("controls_percent_dominant") && !options.get("controls_percent_dominant").equals("")) { String val = options.get("controls_percent_dominant"); String opt = options.get("option_controls_dom"); whereClauses.add("variant_stats.controls_percent_dominant " + opt + " " + val); } if (options.containsKey("cases_percent_recessive") && !options.get("cases_percent_recessive").equals("")) { String val = options.get("cases_percent_recessive"); String opt = options.get("option_cases_rec"); whereClauses.add("variant_stats.cases_percent_recessive " + opt + " " + val); } if (options.containsKey("controls_percent_recessive") && !options.get("controls_percent_recessive").equals("")) { String val = options.get("controls_percent_recessive"); String opt = options.get("option_controls_rec"); whereClauses.add("variant_stats.controls_percent_recessive " + opt + " " + val); } if (options.containsKey("biotype") && !options.get("biotype").equals("")) { String[] biotypes = options.get("biotype").split(","); StringBuilder biotypesClauses = new StringBuilder(" ( "); for (int i = 0; i < biotypes.length; i++) { biotypesClauses.append("variant_effect.feature_biotype LIKE '%").append(biotypes[i]).append("%'"); if (i < 
(biotypes.length - 1)) { biotypesClauses.append(" OR "); } } biotypesClauses.append(" ) "); whereClauses.add(biotypesClauses.toString()); } if (options.containsKey("exc_1000g_controls") && options.get("exc_1000g_controls").equalsIgnoreCase("on")) { whereClauses.add("(key NOT LIKE '1000G%' OR key is null)"); } else if (options.containsKey("maf_1000g_controls") && !options.get("maf_1000g_controls").equals("")) { controlsMAFs.put("1000G", options.get("maf_1000g_controls")); } if (options.containsKey("exc_bier_controls") && options.get("exc_bier_controls").equalsIgnoreCase("on")) { whereClauses.add("(key NOT LIKE 'BIER%' OR key is null)"); } else if (options.containsKey("maf_bier_controls") && !options.get("maf_bier_controls").equals("")) { controlsMAFs.put("BIER", options.get("maf_bier_controls")); } if (options.containsKey("exc_evs_controls") && options.get("exc_evs_controls").equalsIgnoreCase("on")) { whereClauses.add("(key NOT LIKE 'EVS%' OR key is null)"); } else if (options.containsKey("maf_evs_controls") && !options.get("maf_evs_controls").equals("")) { controlsMAFs.put("BIER", options.get("maf_evs_controls")); } if (options.containsKey("conseq_type[]") && !options.get("conseq_type[]").equals("")) { whereClauses.add(processConseqType(options.get("conseq_type[]"))); } if (options.containsKey("genes") && !options.get("genes").equals("")) { whereClauses.add(processGeneList(options.get("genes"))); // processGeneList(options.get("genes")); } if (sampleGenotypes.size() > 0) { StringBuilder sg = new StringBuilder(); int csg = 0; sg.append("("); for (Map.Entry<String, List<String>> entry : sampleGenotypes.entrySet()) { sg.append("("); sg.append("sample_name='").append(entry.getKey()).append("' AND ("); for (int i = 0; i < entry.getValue().size(); i++) { String[] aux = entry.getValue().get(i).split("/"); sg.append("("); sg.append("allele_1=").append(aux[0]).append(" AND allele_2=").append(aux[1]); sg.append(")"); if (i + 1 < entry.getValue().size()) { sg.append(" OR "); 
} } sg.append(")"); sg.append(" OR sample_name<>'").append(entry.getKey()).append("'"); sg.append(")"); if (csg + 1 < sampleGenotypes.entrySet().size()) { sg.append(" AND "); } csg++; } sg.append(")"); System.out.println(sg); whereClauses.add(sg.toString()); } String sql = "SELECT count(*) as count FROM sample ;"; stmt = con.createStatement(); ResultSet rs = stmt.executeQuery(sql); int numSamples = 0; while (rs.next()) { numSamples = rs.getInt("count"); } stmt.close(); System.out.println("controlsMAFs = " + controlsMAFs); System.out.println("sampleGenotypes = " + sampleGenotypes); String innerJoinVariantSQL = " left join variant_info on variant.id_variant=variant_info.id_variant "; // String innerJoinEffectSQL = " inner join variant_effect on variant_effect.chromosome=variant.chromosome AND variant_effect.position=variant.position AND variant_effect.reference_allele=variant.ref AND variant_effect.alternative_allele = variant.alt "; sql = "SELECT distinct variant.genes,variant.consequence_types, variant.id_variant, variant_info.key, variant_info.value, sample_info.sample_name, sample_info.allele_1, sample_info.allele_2, variant_stats.chromosome ," + "variant_stats.position , variant_stats.allele_ref , variant_stats.allele_alt , variant_stats.id , variant_stats.maf , variant_stats.mgf, " + "variant_stats.allele_maf , variant_stats.genotype_maf , variant_stats.miss_allele , variant_stats.miss_gt , variant_stats.mendel_err ," + "variant_stats.is_indel , variant_stats.cases_percent_dominant , variant_stats.controls_percent_dominant , variant_stats.cases_percent_recessive , variant_stats.controls_percent_recessive, " + "variant.polyphen_score, variant.polyphen_effect, variant.sift_score, variant.sift_effect " + " FROM variant_stats " + "inner join variant on variant_stats.chromosome=variant.chromosome AND variant_stats.position=variant.position AND variant_stats.allele_ref=variant.ref AND variant_stats.allele_alt=variant.alt " + "inner join sample_info on 
variant.id_variant=sample_info.id_variant " + innerJoinVariantSQL; if (whereClauses.size() > 0) { StringBuilder where = new StringBuilder(" where "); for (int i = 0; i < whereClauses.size(); i++) { where.append(whereClauses.get(i)); if (i < whereClauses.size() - 1) { where.append(" AND "); } } sql += where.toString() + " ORDER BY variant_stats.chromosome , variant_stats.position , variant_stats.allele_ref , variant_stats.allele_alt ;"; } System.out.println(sql); System.out.println("Start SQL"); long start = System.currentTimeMillis(); stmt = con.createStatement(); rs = stmt.executeQuery(sql); VariantStats vs; VariantInfo vi = null; String chr = ""; int pos = 0; String ref = "", alt = ""; System.out.println("End SQL: " + ((System.currentTimeMillis() - start) / 1000.0) + " s."); System.out.println("Processing"); while (rs.next()) { if (!rs.getString("chromosome").equals(chr) || rs.getInt("position") != pos || !rs.getString("allele_ref").equals(ref) || !rs.getString("allele_alt").equals(alt)) { chr = rs.getString("chromosome"); pos = rs.getInt("position"); ref = rs.getString("allele_ref"); alt = rs.getString("allele_alt"); if (vi != null && filterGenotypes(vi, numSamples) && filterControls(vi, controlsMAFs)) { list.add(vi); } vi = new VariantInfo(chr, pos, ref, alt); vs = new VariantStats(chr, pos, ref, alt, rs.getDouble("maf"), rs.getDouble("mgf"), rs.getString("allele_maf"), rs.getString("genotype_maf"), rs.getInt("miss_allele"), rs.getInt("miss_gt"), rs.getInt("mendel_err"), rs.getInt("is_indel") == 1, rs.getDouble("cases_percent_dominant"), rs.getDouble("controls_percent_dominant"), rs.getDouble("cases_percent_recessive"), rs.getDouble("controls_percent_recessive")); vs.setId(rs.getString("id")); // vi.addGenotypes(rs.getString("genotypes")); vi.addStats(vs); vi.addGenes(rs.getString("genes")); vi.addConsequenceTypes(rs.getString("consequence_types")); vi.setPolyphen_score(rs.getDouble("polyphen_score")); vi.setSift_score(rs.getDouble("sift_score")); 
vi.setPolyphen_effect(rs.getInt("polyphen_effect")); vi.setSift_effect(rs.getInt("sift_effect")); } if (rs.getString("key") != null && rs.getString("value") != null) { vi.addControl(rs.getString("key"), rs.getString("value")); } String sample = rs.getString("sample_name"); String gt = rs.getInt("allele_1") + "/" + rs.getInt("allele_2"); vi.addSammpleGenotype(sample, gt); // vi.addGeneAndConsequenceType(rs.getString("gene_name"), rs.getString("consequence_type_obo")); } if (vi != null && filterGenotypes(vi, numSamples) && filterControls(vi, controlsMAFs)) { list.add(vi); } stmt.close(); System.out.println("Total: (" + list.size() + ")"); System.out.println("End processing: " + ((System.currentTimeMillis() - start) / 1000.0) + " s."); con.close(); } catch (ClassNotFoundException | SQLException e) { System.err.println("STATS: " + e.getClass().getName() + ": " + e.getMessage()); } return list; } private void showDb(String dbName) { System.out.println("DB: " + dbName); } @Override public List<VariantStats> getRecordsStats(Map<String, String> options) { Connection con; Statement stmt; List<VariantStats> list = new ArrayList<>(100); String dbName = options.get("db_name"); try { Class.forName("org.sqlite.JDBC"); con = DriverManager.getConnection("jdbc:sqlite:" + dbName); List<String> whereClauses = new ArrayList<>(10); if (options.containsKey("region_list") && !options.get("region_list").equals("")) { StringBuilder regionClauses = new StringBuilder("("); String[] regions = options.get("region_list").split(","); Pattern pattern = Pattern.compile("(\\w+):(\\d+)-(\\d+)"); Matcher matcher; for (int i = 0; i < regions.length; i++) { String region = regions[i]; matcher = pattern.matcher(region); if (matcher.find()) { String chr = matcher.group(1); int start = Integer.valueOf(matcher.group(2)); int end = Integer.valueOf(matcher.group(3)); regionClauses.append("( variant_stats.chromosome='").append(chr).append("' AND "); 
regionClauses.append("variant_stats.position>=").append(start).append(" AND "); regionClauses.append("variant_stats.position<=").append(end).append(" )"); if (i < (regions.length - 1)) { regionClauses.append(" OR "); } } } regionClauses.append(" ) "); whereClauses.add(regionClauses.toString()); } if (options.containsKey("mend_error") && !options.get("mend_error").equals("")) { String val = options.get("mend_error"); String opt = options.get("option_mend_error"); whereClauses.add("variant_stats.mendel_err " + opt + " " + val); } if (options.containsKey("is_indel") && options.get("is_indel").equalsIgnoreCase("on")) { whereClauses.add("variant_stats.is_indel=1"); } if (options.containsKey("maf") && !options.get("maf").equals("")) { String val = options.get("maf"); String opt = options.get("option_maf"); whereClauses.add("variant_stats.maf " + opt + " " + val); } if (options.containsKey("mgf") && !options.get("mgf").equals("")) { String val = options.get("mgf"); String opt = options.get("option_mgf"); whereClauses.add("variant_stats.mgf " + opt + " " + val); } if (options.containsKey("miss_allele") && !options.get("miss_allele").equals("")) { String val = options.get("miss_allele"); String opt = options.get("option_miss_allele"); whereClauses.add("variant_stats.miss_allele " + opt + " " + val); } if (options.containsKey("miss_gt") && !options.get("miss_gt").equals("")) { String val = options.get("miss_gt"); String opt = options.get("option_miss_gt"); whereClauses.add("variant_stats.miss_gt " + opt + " " + val); } if (options.containsKey("cases_percent_dominant") && !options.get("cases_percent_dominant").equals("")) { String val = options.get("cases_percent_dominant"); String opt = options.get("option_cases_dom"); whereClauses.add("variant_stats.cases_percent_dominant " + opt + " " + val); } if (options.containsKey("controls_percent_dominant") && !options.get("controls_percent_dominant").equals("")) { String val = options.get("controls_percent_dominant"); String opt = 
options.get("option_controls_dom"); whereClauses.add("variant_stats.controls_percent_dominant " + opt + " " + val); } if (options.containsKey("cases_percent_recessive") && !options.get("cases_percent_recessive").equals("")) { String val = options.get("cases_percent_recessive"); String opt = options.get("option_cases_rec"); whereClauses.add("variant_stats.cases_percent_recessive " + opt + " " + val); } if (options.containsKey("controls_percent_recessive") && !options.get("controls_percent_recessive").equals("")) { String val = options.get("controls_percent_recessive"); String opt = options.get("option_controls_rec"); whereClauses.add("variant_stats.controls_percent_recessive " + opt + " " + val); } if (options.containsKey("genes") && !options.get("genes").equals("")) { whereClauses.add(processGeneList(options.get("genes"))); } String sql = "SELECT distinct variant_stats.chromosome ," + "variant_stats.position , variant_stats.allele_ref , variant_stats.allele_alt, variant_stats.maf , variant_stats.mgf, " + "variant_stats.allele_maf , variant_stats.genotype_maf , variant_stats.miss_allele , variant_stats.miss_gt , variant_stats.mendel_err ," + "variant_stats.is_indel , variant_stats.cases_percent_dominant , variant_stats.controls_percent_dominant , variant_stats.cases_percent_recessive , variant_stats.controls_percent_recessive" + " FROM variant_stats "; if (whereClauses.size() > 0) { StringBuilder where = new StringBuilder(" where "); for (int i = 0; i < whereClauses.size(); i++) { where.append(whereClauses.get(i)); if (i < whereClauses.size() - 1) { where.append(" AND "); } } sql += where.toString() + " ORDER BY variant_stats.chromosome , variant_stats.position , variant_stats.allele_ref ;"; } System.out.println(sql); System.out.println("Start SQL"); long start = System.currentTimeMillis(); stmt = con.createStatement(); stmt = con.createStatement(); ResultSet rs = stmt.executeQuery(sql); VariantStats vs; VariantInfo vi = null; String chr = ""; int pos = 0; String 
ref = "", alt = ""; System.out.println("End SQL: " + ((System.currentTimeMillis() - start) / 1000.0) + " s."); System.out.println("Processing"); while (rs.next()) { chr = rs.getString("chromosome"); pos = rs.getInt("position"); ref = rs.getString("allele_ref"); alt = rs.getString("allele_alt"); vs = new VariantStats(chr, pos, ref, alt, rs.getDouble("maf"), rs.getDouble("mgf"), rs.getString("allele_maf"), rs.getString("genotype_maf"), rs.getInt("miss_allele"), rs.getInt("miss_gt"), rs.getInt("mendel_err"), rs.getInt("is_indel") == 1, rs.getDouble("cases_percent_dominant"), rs.getDouble("controls_percent_dominant"), rs.getDouble("cases_percent_recessive"), rs.getDouble("controls_percent_recessive")); list.add(vs); } System.out.println("Total: (" + list.size() + ")"); System.out.println("End processing: " + ((System.currentTimeMillis() - start) / 1000.0) + " s."); stmt.close(); con.close(); } catch (ClassNotFoundException | SQLException e) { System.err.println("STATS: " + e.getClass().getName() + ": " + e.getMessage()); } return list; } @Override public List<VariantEffect> getEffect(Map<String, String> options) { Statement stmt; Connection con; List<VariantEffect> list = new ArrayList<>(100); String dbName = options.get("db_name"); try { Class.forName("org.sqlite.JDBC"); con = DriverManager.getConnection("jdbc:sqlite:" + dbName); String chr = options.get("chr"); int pos = Integer.valueOf(options.get("pos")); String ref = options.get("ref"); String alt = options.get("alt"); String sql = "SELECT * FROM variant_effect WHERE chromosome='" + chr + "' AND position=" + pos + " AND reference_allele='" + ref + "' AND alternative_allele='" + alt + "';"; System.out.println(sql); stmt = con.createStatement(); ResultSet rs = stmt.executeQuery(sql); VariantEffect ve; while (rs.next()) { ve = new VariantEffect(rs.getString("chromosome"), rs.getInt("position"), rs.getString("reference_allele"), rs.getString("alternative_allele"), rs.getString("feature_id"), 
rs.getString("feature_name"), rs.getString("feature_type"), rs.getString("feature_biotype"), rs.getString("feature_chromosome"), rs.getInt("feature_start"), rs.getInt("feature_end"), rs.getString("feature_strand"), rs.getString("snp_id"), rs.getString("ancestral"), rs.getString("alternative"), rs.getString("gene_id"), rs.getString("transcript_id"), rs.getString("gene_name"), rs.getString("consequence_type"), rs.getString("consequence_type_obo"), rs.getString("consequence_type_desc"), rs.getString("consequence_type_type"), rs.getInt("aa_position"), rs.getString("aminoacid_change"), rs.getString("codon_change")); ve.setPolyphenEffect(rs.getInt("polyphen_effect")); ve.setSiftEffect(rs.getInt("sift_effect")); ve.setPolyphenScore(rs.getDouble("polyphen_score")); ve.setSiftScore(rs.getDouble("sift_score")); list.add(ve); } stmt.close(); con.close(); } catch (ClassNotFoundException | SQLException e) { System.err.println("EFFECT: " + e.getClass().getName() + ": " + e.getMessage()); } return list; } @Override public VariantAnalysisInfo getAnalysisInfo(Map<String, String> options) { Statement stmt; Connection con; VariantAnalysisInfo vi = new VariantAnalysisInfo(); String dbName = options.get("db_name"); showDb(dbName); try { Class.forName("org.sqlite.JDBC"); con = DriverManager.getConnection("jdbc:sqlite:" + dbName); String sql = "SELECT * FROM sample ;"; stmt = con.createStatement(); ResultSet rs = stmt.executeQuery(sql); while (rs.next()) { vi.addSample(rs.getString("name")); } stmt.close(); sql = "select * from consequence_type_count"; stmt = con.createStatement(); rs = stmt.executeQuery(sql); while (rs.next()) { vi.addConsequenceType(rs.getString("consequence_type_obo"), rs.getInt("count")); } stmt.close(); sql = "select * from biotype_count;"; stmt = con.createStatement(); rs = stmt.executeQuery(sql); while (rs.next()) { vi.addBiotype(rs.getString("feature_biotype"), rs.getInt("count")); } stmt.close(); sql = "select * from global_stats"; stmt = con.createStatement(); 
rs = stmt.executeQuery(sql); while (rs.next()) { vi.addGlobalStats(rs.getString("name").toLowerCase(), rs.getDouble("value")); } stmt.close(); sql = "select count(*) as count, chromosome from variant group by chromosome"; stmt = con.createStatement(); rs = stmt.executeQuery(sql); while (rs.next()) { vi.addChromosome(rs.getString("chromosome"), rs.getInt("count")); } stmt.close(); con.close(); } catch (ClassNotFoundException | SQLException e) { System.err.println("ANALYSIS INFO: " + e.getClass().getName() + ": " + e.getMessage()); } return vi; } private String processGeneList(String genes) { System.out.println("genes = " + genes); List<String> list = new ArrayList<>(); // Client client = ClientBuilder.newClient(); // WebTarget webTarget = client.target("http://ws.bioinfo.cipf.es/cellbase/rest/latest/hsa/feature/gene/"); Client client = Client.create(); WebResource webResource = client.resource("http://ws.bioinfo.cipf.es/cellbase/rest/latest/hsa/feature/gene/"); ObjectMapper mapper = new ObjectMapper(); // Response response = webTarget.path(genes).path("info").queryParam("of", "json").request().get(); String response = webResource.path(genes).path("info").queryParam("of", "json").get(String.class); String data = response.toString(); System.out.println("response = " + response); try { JsonNode actualObj = mapper.readTree(data); Iterator<JsonNode> it = actualObj.iterator(); Iterator<JsonNode> aux; StringBuilder sb; while (it.hasNext()) { JsonNode node = it.next(); if (node.isArray()) { aux = node.iterator(); while (aux.hasNext()) { JsonNode auxNode = aux.next(); sb = new StringBuilder("("); System.out.println("auxNode.get(\"chromosome\").asText() = " + auxNode.get("chromosome").asText()); sb.append("variant_stats.chromosome='").append(auxNode.get("chromosome").asText()).append("' AND "); sb.append("variant_stats.position>=").append(auxNode.get("start")).append(" AND "); sb.append("variant_stats.position<=").append(auxNode.get("end")).append(" )"); 
list.add(sb.toString()); } } } } catch (IOException e) { e.printStackTrace(); } String res = "(" + StringUtils.join(list, " OR ") + ")"; return res; } private boolean filterControls(VariantInfo vi, Map<String, String> controlsMAFs) { boolean res = true; String key; VariantControl vc; float controlMAF; for (Map.Entry<String, VariantControl> entry : vi.getControls().entrySet()) { key = entry.getKey(); vc = entry.getValue(); if (controlsMAFs.containsKey(key)) { controlMAF = Float.valueOf(controlsMAFs.get(key)); if (vc.getMaf() > controlMAF) { return false; } } } return res; } private String processConseqType(String conseqType) { List<String> clauses = new ArrayList<>(10); String[] cts = conseqType.split(","); for (String ct : cts) { clauses.add("(variant.consequence_types LIKE '%" + ct + "%' )"); } String res = ""; if (clauses.size() > 0) { res = "(" + StringUtils.join(clauses, " OR ") + ")"; } return res; } private boolean filterGenotypes(VariantInfo variantInfo, int numSamples) { // if (variantInfo.getSampleGenotypes().size() != numSamples) { // return false; // } else { // return true; // } return variantInfo.getSampleGenotypes().size() == numSamples; } private Map<String, List<String>> processSamplesGT(Map<String, String> options) { Map<String, List<String>> samplesGenotypes = new LinkedHashMap<>(10); List<String> genotypesList; String key, val; for (Map.Entry<String, String> entry : options.entrySet()) { key = entry.getKey(); val = entry.getValue(); if (key.startsWith("sampleGT_")) { String sampleName = key.replace("sampleGT_", "").replace("[]", ""); String[] genotypes = val.split(","); if (samplesGenotypes.containsKey(sampleName)) { genotypesList = samplesGenotypes.get(sampleName); } else { genotypesList = new ArrayList<>(); samplesGenotypes.put(sampleName, genotypesList); } for (int i = 0; i < genotypes.length; i++) { genotypesList.add(genotypes[i]); } } } return samplesGenotypes; } private void processSamplesGT(Map<String, String> options, List<String> 
whereClauses) { String key, val; List<String> auxClauses = new ArrayList<>(); for (Map.Entry<String, String> entry : options.entrySet()) { key = entry.getKey(); val = entry.getValue(); if (key.startsWith("sampleGT_")) { String sampleName = key.replace("sampleGT_", "").replace("[]", ""); String[] genotypes = val.split(","); StringBuilder sb = new StringBuilder("("); for (int i = 0; i < genotypes.length; i++) { String[] gt = genotypes[i].split("_"); sb.append("("); sb.append("sample_info.sample_name='" + sampleName + "'"); sb.append(" AND sample_info.allele_1=" + gt[0]); sb.append(" AND sample_info.allele_2=" + gt[1]); sb.append(")"); if (i < genotypes.length - 1) { sb.append(" OR "); } } sb.append(")"); auxClauses.add(sb.toString()); } } if (auxClauses.size() > 0) { String finalSampleWhere = StringUtils.join(auxClauses, " AND "); whereClauses.add(finalSampleWhere); } } /* ****************************************** * Path and index checking * * ******************************************/ private Path getMetaDir(Path file) { String inputName = file.getFileName().toString(); return file.getParent().resolve(".meta_" + inputName); } }
Added username to the TODO comments
opencga-storage/src/main/java/org/opencb/opencga/storage/variant/VariantSqliteQueryBuilder.java
Added username to the TODO comments
<ide><path>pencga-storage/src/main/java/org/opencb/opencga/storage/variant/VariantSqliteQueryBuilder.java <ide> <ide> @Override <ide> public List<QueryResult> getAllVariantsByRegionList(List<Region> region, String studyName, QueryOptions options) { <del> return null; // TODO Implementation needed <add> return null; // TODO aaleman: Implementation needed <ide> } <ide> <ide> @Override <ide> <ide> @Override <ide> public QueryResult getStatsByVariant(Variant variant, QueryOptions options) { <del> return null; // TODO Implementation needed <add> return null; // TODO aaleman: Implementation needed <ide> } <ide> <ide> @Override <ide> public QueryResult getSimpleStatsByVariant(Variant variant, QueryOptions options) { <del> return null; // TODO Implementation needed <add> return null; // TODO aaleman: Implementation needed <ide> } <ide> <ide> @Override <ide> public QueryResult getEffectsByVariant(Variant variant, QueryOptions options) { <del> return null; //To change body of implemented methods use File | Settings | File Templates. <add> return null; // TODO aaleman: Implementation needed <ide> } <ide> <ide> @Override
Java
apache-2.0
66297e3811be19f83d3d640d2422f89f5d15ca63
0
globocom/GloboDNS-Client
package com.globo.dnsapi.api; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import java.util.List; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import com.globo.dnsapi.TestRequestProcessor; import com.globo.dnsapi.TestRequestProcessor.HttpMethod; import com.globo.dnsapi.exception.DNSAPIException; import com.globo.dnsapi.model.Domain; @RunWith(JUnit4.class) public class DomainAPITest { private DomainAPI domainAPI; private TestRequestProcessor rp; @Before public void setUp() { this.rp = new TestRequestProcessor(); this.domainAPI = this.rp.getDomainAPI(); } @Test(expected=DNSAPIException.class) public void testMissingToken() throws DNSAPIException { this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json", 401, "{\"error\":\"You need to sign in or sign up before continuing.\"}"); this.domainAPI.listAll(); } @Test(expected=DNSAPIException.class) public void testInvalidToken() throws DNSAPIException { this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json", 401, "{\"error\":\"Invalid authentication token.\"}"); this.domainAPI.listAll(); } @Test public void testGetById() throws DNSAPIException { Long domainId = 10L; this.rp.registerFakeRequest(HttpMethod.GET, "/domains/" + domainId + ".json", "{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2014-03-11T17:31:58Z\",\"id\":10,\"last_check\":null,\"master\":null,\"name\":\"anydomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-11T17:38:40Z\",\"user_id\":null,\"view_id\":null}}"); Domain domain = this.domainAPI.getById(domainId); assertNotNull(domain); assertEquals(domainId, domain.getId()); } @Test public void testListByName() throws DNSAPIException { String domainName = "anydomain.com"; this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json?query=" + domainName, 
"[{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2014-03-11T17:31:58Z\",\"id\":0,\"last_check\":null,\"master\":null,\"name\":\"anydomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-11T17:38:40Z\",\"user_id\":null,\"view_id\":null}}]"); List<Domain> domainList = this.domainAPI.listByQuery(domainName); assertNotNull(domainList); assertEquals(1, domainList.size()); Domain domain = domainList.get(0); assertEquals(Long.valueOf(0), domain.getId()); assertEquals(domainName, domain.getName()); } @Test public void testListReverseByName() throws DNSAPIException { String reverseDomain = "10.10.10.in-addr.arpa"; this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json?query=" + reverseDomain + "&reverse=true", "[{\"domain\":{\"account\":null,\"addressing_type\":\"R\",\"authority_type\":\"M\",\"created_at\":\"2014-03-11T17:31:58Z\",\"id\":0,\"last_check\":null,\"master\":null,\"name\":\"10.10.10.in-addr.arpa\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-11T17:38:40Z\",\"user_id\":null,\"view_id\":null}}]"); List<Domain> domainList = this.domainAPI.listReverseByQuery(reverseDomain); assertNotNull(domainList); assertEquals(1, domainList.size()); Domain domain = domainList.get(0); assertEquals(Long.valueOf(0), domain.getId()); assertEquals(reverseDomain, domain.getName()); } @Test public void testListByNameDomainDoesntExist() throws DNSAPIException { String domainName = "unexistant.dev.globoi.com"; this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json?query=" + domainName, "[]"); List<Domain> domainList = this.domainAPI.listByQuery(domainName); assertNotNull(domainList); assertEquals(0, domainList.size()); } @Test public void testListAll() throws DNSAPIException { this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json", 
"[{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2014-03-25T01:04:05Z\",\"id\":1,\"last_check\":null,\"master\":null,\"name\":\"firstdomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-25T10:05:02Z\",\"user_id\":null,\"view_id\":null}}," + "{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2013-08-22T19:48:54Z\",\"id\":2,\"last_check\":null,\"master\":null,\"name\":\"seconddomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2013-08-22T19:48:54Z\",\"user_id\":null,\"view_id\":null}}," + "{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2013-08-13T18:38:37Z\",\"id\":3,\"last_check\":null,\"master\":null,\"name\":\"thirddomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"86400\",\"updated_at\":\"2013-08-13T18:38:37Z\",\"user_id\":null,\"view_id\":null}}," + "{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2014-03-11T17:31:58Z\",\"id\":4,\"last_check\":null,\"master\":null,\"name\":\"fourthdomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-11T17:38:40Z\",\"user_id\":null,\"view_id\":null}}]"); List<Domain> domainList = this.domainAPI.listAll(); assertNotNull(domainList); assertEquals(4, domainList.size()); Domain domain1 = domainList.get(0); assertEquals(Long.valueOf(1), domain1.getId()); assertEquals("N", domain1.getAddressType()); assertEquals("firstdomain.com", domain1.getName()); Domain domain2 = domainList.get(1); assertEquals(Long.valueOf(2), domain2.getId()); assertEquals("N", domain2.getAddressType()); assertEquals("seconddomain.com", domain2.getName()); Domain domain3 = domainList.get(2); assertEquals(Long.valueOf(3), domain3.getId()); assertEquals("N", domain3.getAddressType()); assertEquals("thirddomain.com", domain3.getName()); 
Domain domain4 = domainList.get(3); assertEquals(Long.valueOf(4), domain4.getId()); assertEquals("N", domain4.getAddressType()); assertEquals("fourthdomain.com", domain4.getName()); } @Test public void testListAllReverse() throws DNSAPIException { this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json?reverse=true", "[{\"domain\":{\"account\":null,\"addressing_type\":\"R\",\"authority_type\":\"M\",\"created_at\":\"2013-08-13T18:46:25Z\",\"id\":1,\"last_check\":null,\"master\":null,\"name\":\"0.11.10.in-addr.arpa\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"3H\",\"updated_at\":\"2013-08-13T18:46:25Z\",\"user_id\":null,\"view_id\":null}}," + "{\"domain\":{\"account\":null,\"addressing_type\":\"R\",\"authority_type\":\"M\",\"created_at\":\"2013-08-13T18:46:25Z\",\"id\":2,\"last_check\":null,\"master\":null,\"name\":\"0.17.10.in-addr.arpa\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"3H\",\"updated_at\":\"2013-08-13T18:46:25Z\",\"user_id\":null,\"view_id\":null}}," + "{\"domain\":{\"account\":null,\"addressing_type\":\"R\",\"authority_type\":\"M\",\"created_at\":\"2013-08-13T18:46:25Z\",\"id\":3,\"last_check\":null,\"master\":null,\"name\":\"0.170.10.in-addr.arpa\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"3H\",\"updated_at\":\"2013-08-13T18:46:25Z\",\"user_id\":null,\"view_id\":null}}]"); List<Domain> domainList = this.domainAPI.listAllReverse(); assertNotNull(domainList); assertEquals(3, domainList.size()); Domain domain1 = domainList.get(0); assertEquals(Long.valueOf(1), domain1.getId()); assertEquals("R", domain1.getAddressType()); assertEquals("0.11.10.in-addr.arpa", domain1.getName()); Domain domain2 = domainList.get(1); assertEquals(Long.valueOf(2), domain2.getId()); assertEquals("R", domain2.getAddressType()); assertEquals("0.17.10.in-addr.arpa", domain2.getName()); Domain domain3 = domainList.get(2); assertEquals(Long.valueOf(3), domain3.getId()); assertEquals("R", domain3.getAddressType()); assertEquals("0.170.10.in-addr.arpa", 
domain3.getName()); } @Test public void testCreateDomain() throws DNSAPIException { String newDomainName = "newdomain.com"; String newAuthType = "M"; this.rp.registerFakeRequest(HttpMethod.POST, "/domains.json", "{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2014-03-25T01:04:05Z\",\"id\":100,\"last_check\":null,\"master\":null,\"name\":\"newdomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-25T01:04:05Z\",\"user_id\":null,\"view_id\":null}}"); Domain createdDomain = this.domainAPI.createDomain(newDomainName, 1L, newAuthType); assertNotNull(createdDomain); assertEquals(newDomainName, createdDomain.getName()); assertEquals(newAuthType, createdDomain.getAuthorityType()); } @Test(expected=DNSAPIException.class) public void testCreateDomainAlreadyExists() throws DNSAPIException { String newDomainName = "newdomain.com"; String newAuthType = "M"; this.rp.registerFakeRequest(HttpMethod.POST, "/domains.json", 422, "{\"errors\":{\"name\":[\"has already been taken\"]}}"); this.domainAPI.createDomain(newDomainName, 1L, newAuthType); } @Test public void testCreateReverseDomain() throws DNSAPIException { String newReverseDomainName = "0.10.10.in-addr.arpa"; String newReverseAuthType = "M"; this.rp.registerFakeRequest(HttpMethod.POST, "/domains.json?reverse=true", "{\"domain\":{\"account\":null,\"addressing_type\":\"R\",\"authority_type\":\"M\",\"created_at\":\"2014-03-28T20:40:34Z\",\"id\":20,\"last_check\":null,\"master\":null,\"name\":\"0.10.10.in-addr.arpa\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-28T20:40:34Z\",\"user_id\":null,\"view_id\":null}}"); Domain createdReverseDomain = this.domainAPI.createReverseDomain(newReverseDomainName, 1L, newReverseAuthType); assertNotNull(createdReverseDomain); assertEquals(newReverseDomainName, createdReverseDomain.getName()); assertEquals(newReverseAuthType, createdReverseDomain.getAuthorityType()); } 
}
src/test/java/com/globo/dnsapi/api/DomainAPITest.java
package com.globo.dnsapi.api; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import java.util.List; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import com.globo.dnsapi.TestRequestProcessor; import com.globo.dnsapi.TestRequestProcessor.HttpMethod; import com.globo.dnsapi.exception.DNSAPIException; import com.globo.dnsapi.model.Domain; @RunWith(JUnit4.class) public class DomainAPITest { private DomainAPI domainAPI; private TestRequestProcessor rp; @Before public void setUp() { this.rp = new TestRequestProcessor(); this.domainAPI = this.rp.getDomainAPI(); } @Test(expected=DNSAPIException.class) public void testMissingToken() throws DNSAPIException { this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json", 401, "{\"error\":\"You need to sign in or sign up before continuing.\"}"); this.domainAPI.listAll(); } @Test(expected=DNSAPIException.class) public void testInvalidToken() throws DNSAPIException { this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json", 401, "{\"error\":\"Invalid authentication token.\"}"); this.domainAPI.listAll(); } @Test public void testGetById() throws DNSAPIException { Long domainId = 10L; this.rp.registerFakeRequest(HttpMethod.GET, "/domains/" + domainId + ".json", "{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2014-03-11T17:31:58Z\",\"id\":10,\"last_check\":null,\"master\":null,\"name\":\"anydomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-11T17:38:40Z\",\"user_id\":null,\"view_id\":null}}"); Domain domain = this.domainAPI.getById(domainId); assertNotNull(domain); assertEquals(domainId, domain.getId()); } @Test public void testListByName() throws DNSAPIException { String domainName = "anydomain.com"; this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json?query=" + domainName, 
"[{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2014-03-11T17:31:58Z\",\"id\":0,\"last_check\":null,\"master\":null,\"name\":\"anydomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-11T17:38:40Z\",\"user_id\":null,\"view_id\":null}}]"); List<Domain> domainList = this.domainAPI.listByQuery(domainName); assertNotNull(domainList); assertEquals(1, domainList.size()); Domain domain = domainList.get(0); assertEquals(Long.valueOf(0), domain.getId()); assertEquals(domainName, domain.getName()); } @Test public void testListReverseByName() throws DNSAPIException { String reverseDomain = "10.10.10.in-addr.arpa"; this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json?query=" + reverseDomain + "&reverse=true", "[{\"domain\":{\"account\":null,\"addressing_type\":\"R\",\"authority_type\":\"M\",\"created_at\":\"2014-03-11T17:31:58Z\",\"id\":0,\"last_check\":null,\"master\":null,\"name\":\"10.10.10.in-addr.arpa\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-11T17:38:40Z\",\"user_id\":null,\"view_id\":null}}]"); List<Domain> domainList = this.domainAPI.listReverseByQuery(reverseDomain); assertNotNull(domainList); assertEquals(1, domainList.size()); Domain domain = domainList.get(0); assertEquals(Long.valueOf(0), domain.getId()); assertEquals(reverseDomain, domain.getName()); } @Test public void testListByNameDomainDoesntExist() throws DNSAPIException { String domainName = "unexistant.dev.globoi.com"; this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json?query=" + domainName, "[]"); List<Domain> domainList = this.domainAPI.listByQuery(domainName); assertNotNull(domainList); assertEquals(0, domainList.size()); } @Test public void testListAll() throws DNSAPIException { this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json", 
"[{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2014-03-25T01:04:05Z\",\"id\":1,\"last_check\":null,\"master\":null,\"name\":\"firstdomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-25T10:05:02Z\",\"user_id\":null,\"view_id\":null}}," + "{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2013-08-22T19:48:54Z\",\"id\":2,\"last_check\":null,\"master\":null,\"name\":\"seconddomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2013-08-22T19:48:54Z\",\"user_id\":null,\"view_id\":null}}," + "{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2013-08-13T18:38:37Z\",\"id\":3,\"last_check\":null,\"master\":null,\"name\":\"thirddomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"86400\",\"updated_at\":\"2013-08-13T18:38:37Z\",\"user_id\":null,\"view_id\":null}}," + "{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2014-03-11T17:31:58Z\",\"id\":4,\"last_check\":null,\"master\":null,\"name\":\"fourthdomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-11T17:38:40Z\",\"user_id\":null,\"view_id\":null}}]"); List<Domain> domainList = this.domainAPI.listAll(); assertNotNull(domainList); assertEquals(4, domainList.size()); Domain domain1 = domainList.get(0); assertEquals(Long.valueOf(1), domain1.getId()); assertEquals("N", domain1.getAddressType()); assertEquals("firstdomain.com", domain1.getName()); Domain domain2 = domainList.get(1); assertEquals(Long.valueOf(2), domain2.getId()); assertEquals("N", domain2.getAddressType()); assertEquals("seconddomain.com", domain2.getName()); Domain domain3 = domainList.get(2); assertEquals(Long.valueOf(3), domain3.getId()); assertEquals("N", domain3.getAddressType()); assertEquals("thirddomain.com", domain3.getName()); 
Domain domain4 = domainList.get(3); assertEquals(Long.valueOf(4), domain4.getId()); assertEquals("N", domain4.getAddressType()); assertEquals("fourthdomain.com", domain4.getName()); } @Test public void testListAllReverse() throws DNSAPIException { this.rp.registerFakeRequest(HttpMethod.GET, "/domains.json?reverse=true", "[{\"domain\":{\"account\":null,\"addressing_type\":\"R\",\"authority_type\":\"M\",\"created_at\":\"2013-08-13T18:46:25Z\",\"id\":1,\"last_check\":null,\"master\":null,\"name\":\"0.11.10.in-addr.arpa\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"3H\",\"updated_at\":\"2013-08-13T18:46:25Z\",\"user_id\":null,\"view_id\":null}}," + "{\"domain\":{\"account\":null,\"addressing_type\":\"R\",\"authority_type\":\"M\",\"created_at\":\"2013-08-13T18:46:25Z\",\"id\":2,\"last_check\":null,\"master\":null,\"name\":\"0.17.10.in-addr.arpa\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"3H\",\"updated_at\":\"2013-08-13T18:46:25Z\",\"user_id\":null,\"view_id\":null}}," + "{\"domain\":{\"account\":null,\"addressing_type\":\"R\",\"authority_type\":\"M\",\"created_at\":\"2013-08-13T18:46:25Z\",\"id\":3,\"last_check\":null,\"master\":null,\"name\":\"0.170.10.in-addr.arpa\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"3H\",\"updated_at\":\"2013-08-13T18:46:25Z\",\"user_id\":null,\"view_id\":null}}]"); List<Domain> domainList = this.domainAPI.listAllReverse(); assertNotNull(domainList); assertEquals(3, domainList.size()); Domain domain1 = domainList.get(0); assertEquals(Long.valueOf(1), domain1.getId()); assertEquals("R", domain1.getAddressType()); assertEquals("0.11.10.in-addr.arpa", domain1.getName()); Domain domain2 = domainList.get(1); assertEquals(Long.valueOf(2), domain2.getId()); assertEquals("R", domain2.getAddressType()); assertEquals("0.17.10.in-addr.arpa", domain2.getName()); Domain domain3 = domainList.get(2); assertEquals(Long.valueOf(3), domain3.getId()); assertEquals("R", domain3.getAddressType()); assertEquals("0.170.10.in-addr.arpa", 
domain3.getName()); } @Test public void testCreateDomain() throws DNSAPIException { String newDomainName = "newdomain.com"; String newAuthType = "M"; this.rp.registerFakeRequest(HttpMethod.POST, "/domains.json", "{\"domain\":{\"account\":null,\"addressing_type\":\"N\",\"authority_type\":\"M\",\"created_at\":\"2014-03-25T01:04:05Z\",\"id\":100,\"last_check\":null,\"master\":null,\"name\":\"newdomain.com\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-25T01:04:05Z\",\"user_id\":null,\"view_id\":null}}"); Domain createdDomain = this.domainAPI.createDomain(newDomainName, 1L, newAuthType); assertNotNull(createdDomain); assertEquals(newDomainName, createdDomain.getName()); assertEquals(newAuthType, createdDomain.getAuthorityType()); } @Test(expected=DNSAPIException.class) public void testCreateDomainAlreadyExists() throws DNSAPIException { String newDomainName = "newdomain.com"; String newAuthType = "M"; this.rp.registerFakeRequest(HttpMethod.POST, "/domains.json", 422, "{\"errors\":{\"name\":[\"has already been taken\"]}}"); this.domainAPI.createDomain(newDomainName, 1L, newAuthType); } }
Tests for creating reverse domain
src/test/java/com/globo/dnsapi/api/DomainAPITest.java
Tests for creating reverse domain
<ide><path>rc/test/java/com/globo/dnsapi/api/DomainAPITest.java <ide> <ide> this.domainAPI.createDomain(newDomainName, 1L, newAuthType); <ide> } <add> <add> @Test <add> public void testCreateReverseDomain() throws DNSAPIException { <add> String newReverseDomainName = "0.10.10.in-addr.arpa"; <add> String newReverseAuthType = "M"; <add> this.rp.registerFakeRequest(HttpMethod.POST, "/domains.json?reverse=true", <add> "{\"domain\":{\"account\":null,\"addressing_type\":\"R\",\"authority_type\":\"M\",\"created_at\":\"2014-03-28T20:40:34Z\",\"id\":20,\"last_check\":null,\"master\":null,\"name\":\"0.10.10.in-addr.arpa\",\"notes\":null,\"notified_serial\":null,\"ttl\":\"10800\",\"updated_at\":\"2014-03-28T20:40:34Z\",\"user_id\":null,\"view_id\":null}}"); <add> <add> Domain createdReverseDomain = this.domainAPI.createReverseDomain(newReverseDomainName, 1L, newReverseAuthType); <add> assertNotNull(createdReverseDomain); <add> assertEquals(newReverseDomainName, createdReverseDomain.getName()); <add> assertEquals(newReverseAuthType, createdReverseDomain.getAuthorityType()); <add> } <ide> }
Java
mpl-2.0
f85b015a6d547688ce92f199ef5fb387119621b9
0
MozillaCZ/MozStumbler,priyankvex/MozStumbler,garvankeeley/MozStumbler,crankycoder/MozStumbler,MozillaCZ/MozStumbler,garvankeeley/MozStumbler,priyankvex/MozStumbler,petercpg/MozStumbler,MozillaCZ/MozStumbler,petercpg/MozStumbler,garvankeeley/MozStumbler,petercpg/MozStumbler,priyankvex/MozStumbler,cascheberg/MozStumbler,crankycoder/MozStumbler,cascheberg/MozStumbler,cascheberg/MozStumbler,crankycoder/MozStumbler
package org.mozilla.mozstumbler.client; import android.app.Activity; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.Handler; import android.os.Message; import android.support.v4.content.LocalBroadcastManager; import android.os.Bundle; import android.text.Html; import android.util.AttributeSet; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.ScrollView; import android.widget.TextView; import org.mozilla.mozstumbler.R; import org.mozilla.mozstumbler.service.AppGlobals; import java.lang.ref.WeakReference; import java.util.LinkedList; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.ConcurrentLinkedQueue; public class LogActivity extends Activity { static LinkedList<String> buffer = new LinkedList<String>(); static final int MAX_SIZE = 1000; private static LogMessageReceiver sInstance; public static class LogMessageReceiver extends BroadcastReceiver { // Ensure that the message buffer used by the GUI is accessed only on the main thread static class AddToBufferOnMain extends Handler { WeakReference<LogMessageReceiver> mParentClass; public AddToBufferOnMain(WeakReference<LogMessageReceiver> parent) { mParentClass = parent; } public void handleMessage(Message m) { String msg = null; do { msg = AppGlobals.guiLogMessageBuffer.poll(); if (mParentClass.get() != null) mParentClass.get().addMessageToBuffer(msg); } while (msg != null); } } Timer mFlushMessagesTimer = new Timer(); AddToBufferOnMain mMainThreadHandler; public static void createGlobalInstance(Context context) { sInstance = new LogMessageReceiver(context); sInstance.mMainThreadHandler = new AddToBufferOnMain(new WeakReference<LogMessageReceiver>(sInstance)); AppGlobals.guiLogMessageBuffer = new ConcurrentLinkedQueue<String>(); } LogMessageReceiver(Context context) { 
LocalBroadcastManager.getInstance(context).registerReceiver(this, new IntentFilter(AppGlobals.ACTION_GUI_LOG_MESSAGE)); final int kMillis = 1000 * 3; mFlushMessagesTimer.scheduleAtFixedRate(new TimerTask() { @Override public void run() { mMainThreadHandler.obtainMessage().sendToTarget(); } }, kMillis, kMillis); } void addMessageToBuffer(String s) { if (s == null) return; if (buffer.size() > MAX_SIZE) { buffer.removeFirst(); } int kMaxChars = 150; int kBufSizeBeforeTruncate = 30; if (buffer.size() == kBufSizeBeforeTruncate + 1) { String msg = "BUFFER REACHED " + kBufSizeBeforeTruncate +" MESSAGES. TRUNCATING MESSAGES."; buffer.add(msg); if (sConsoleView != null) sConsoleView.println(msg); } if (buffer.size() > kBufSizeBeforeTruncate && s.length() > kMaxChars) { s = s.substring(0, kMaxChars) + " ..."; } buffer.add(s); if (sConsoleView != null) { sConsoleView.println(s); } } @Override public void onReceive(Context c, Intent intent) { String s = intent.getStringExtra(AppGlobals.ACTION_GUI_LOG_MESSAGE_EXTRA); addMessageToBuffer(s); } } ConsoleView mConsoleView; static ConsoleView sConsoleView; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_log); mConsoleView = (ConsoleView) findViewById(R.id.scrollview); } @Override protected void onResume() { super.onResume(); sConsoleView = mConsoleView; for (String s: buffer) { mConsoleView.println(s); } } @Override protected void onPause() { super.onPause(); sConsoleView = null; } public static class ConsoleView extends ScrollView { public TextView tv; boolean enable_scroll = true; void init(Context context) { tv = new TextView(context); addView(tv); tv.setTextSize(13.0f); tv.setClickable(false); enableScroll(true); } public ConsoleView(Context context) { super(context); init(context); } public ConsoleView(Context context, AttributeSet attrs) { super(context, attrs); init(context); } public ConsoleView(Context context, AttributeSet attrs, int 
defStyle) { super(context, attrs, defStyle); init(context); } public void enableScroll(boolean v) { this.enable_scroll = v; } public void print(String str){ tv.append(Html.fromHtml(str + "<br />")); if (enable_scroll) { scrollTo(0,tv.getBottom()); } } public void println(String str){ print(str + "\n"); } public void clear() { tv.setText(""); this.scrollTo(0, 0); } } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.log_menu, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.scroll_to_start: this.mConsoleView.fullScroll(View.FOCUS_UP); return true; case R.id.scroll_to_end: this.mConsoleView.fullScroll(View.FOCUS_DOWN); return true; default: return super.onOptionsItemSelected(item); } } }
src/org/mozilla/mozstumbler/client/LogActivity.java
package org.mozilla.mozstumbler.client; import android.app.Activity; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.Handler; import android.os.Message; import android.support.v4.content.LocalBroadcastManager; import android.os.Bundle; import android.text.Html; import android.util.AttributeSet; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.ScrollView; import android.widget.TextView; import org.mozilla.mozstumbler.R; import org.mozilla.mozstumbler.service.SharedConstants; import java.util.Date; import java.util.LinkedList; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.ConcurrentLinkedQueue; public class LogActivity extends Activity { static LinkedList<String> buffer = new LinkedList<String>(); static final int MAX_SIZE = 1000; private static LogMessageReceiver sInstance; public static class LogMessageReceiver extends BroadcastReceiver { Timer mFlushMessagesTimer = new Timer(); Handler mMainThreadHandler = new Handler() { public void handleMessage(Message m) { String msg = null; do { msg = SharedConstants.guiLogMessageBuffer.poll(); addMessageToBuffer(msg); } while (msg != null); } }; public static void createGlobalInstance(Context context) { sInstance = new LogMessageReceiver(context); SharedConstants.guiLogMessageBuffer = new ConcurrentLinkedQueue<String>(); } LogMessageReceiver(Context context) { LocalBroadcastManager.getInstance(context).registerReceiver(this, new IntentFilter(SharedConstants.ACTION_GUI_LOG_MESSAGE)); final int kMillis = 1000 * 3; mFlushMessagesTimer.scheduleAtFixedRate(new TimerTask() { @Override public void run() { mMainThreadHandler.obtainMessage().sendToTarget(); } }, kMillis, kMillis); } void addMessageToBuffer(String s) { if (s == null) return; if (buffer.size() > MAX_SIZE) { buffer.removeFirst(); } int kMaxChars = 150; int 
kBufSizeBeforeTruncate = 30; if (buffer.size() == kBufSizeBeforeTruncate + 1) { String msg = "BUFFER REACHED " + kBufSizeBeforeTruncate +" MESSAGES. TRUNCATING MESSAGES."; buffer.add(msg); if (sConsoleView != null) sConsoleView.println(msg); } if (buffer.size() > kBufSizeBeforeTruncate && s.length() > kMaxChars) { s = s.substring(0, kMaxChars) + " ..."; } buffer.add(s); if (sConsoleView != null) { sConsoleView.println(s); } } @Override public void onReceive(Context c, Intent intent) { String s = intent.getStringExtra(SharedConstants.ACTION_GUI_LOG_MESSAGE_EXTRA); addMessageToBuffer(s); } } ConsoleView mConsoleView; static ConsoleView sConsoleView; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_log); mConsoleView = (ConsoleView) findViewById(R.id.scrollview); } @Override protected void onResume() { super.onResume(); sConsoleView = mConsoleView; for (String s: buffer) { mConsoleView.println(s); } } @Override protected void onPause() { super.onPause(); sConsoleView = null; } public static class ConsoleView extends ScrollView { public TextView tv; boolean enable_scroll = true; void init(Context context) { tv = new TextView(context); addView(tv); tv.setTextSize(13.0f); tv.setClickable(false); enableScroll(true); } public ConsoleView(Context context) { super(context); init(context); } public ConsoleView(Context context, AttributeSet attrs) { super(context, attrs); init(context); } public ConsoleView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); init(context); } public void enableScroll(boolean v) { this.enable_scroll = v; } public void print(String str){ tv.append(Html.fromHtml(str + "<br />")); if (enable_scroll) { scrollTo(0,tv.getBottom()); } } public void println(String str){ print(str + "\n"); } public void clear() { tv.setText(""); this.scrollTo(0, 0); } } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this 
adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.log_menu, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.scroll_to_start: this.mConsoleView.fullScroll(View.FOCUS_UP); return true; case R.id.scroll_to_end: this.mConsoleView.fullScroll(View.FOCUS_DOWN); return true; default: return super.onOptionsItemSelected(item); } } }
Lint complains about a possible circular reference in this file. Changed the code to use have the inner class reference the outer using a weak reference, and the warning is gone
src/org/mozilla/mozstumbler/client/LogActivity.java
Lint complains about a possible circular reference in this file. Changed the code to use have the inner class reference the outer using a weak reference, and the warning is gone
<ide><path>rc/org/mozilla/mozstumbler/client/LogActivity.java <ide> import android.widget.ScrollView; <ide> import android.widget.TextView; <ide> import org.mozilla.mozstumbler.R; <del>import org.mozilla.mozstumbler.service.SharedConstants; <del> <del>import java.util.Date; <add>import org.mozilla.mozstumbler.service.AppGlobals; <add> <add>import java.lang.ref.WeakReference; <ide> import java.util.LinkedList; <ide> import java.util.Timer; <ide> import java.util.TimerTask; <ide> <ide> public static class LogMessageReceiver extends BroadcastReceiver { <ide> <del> Timer mFlushMessagesTimer = new Timer(); <del> Handler mMainThreadHandler = new Handler() { <add> // Ensure that the message buffer used by the GUI is accessed only on the main thread <add> static class AddToBufferOnMain extends Handler { <add> WeakReference<LogMessageReceiver> mParentClass; <add> <add> public AddToBufferOnMain(WeakReference<LogMessageReceiver> parent) { <add> mParentClass = parent; <add> } <add> <ide> public void handleMessage(Message m) { <ide> String msg = null; <ide> do { <del> msg = SharedConstants.guiLogMessageBuffer.poll(); <del> addMessageToBuffer(msg); <add> msg = AppGlobals.guiLogMessageBuffer.poll(); <add> if (mParentClass.get() != null) <add> mParentClass.get().addMessageToBuffer(msg); <ide> } while (msg != null); <ide> } <del> }; <add> } <add> <add> Timer mFlushMessagesTimer = new Timer(); <add> AddToBufferOnMain mMainThreadHandler; <ide> <ide> public static void createGlobalInstance(Context context) { <ide> sInstance = new LogMessageReceiver(context); <del> SharedConstants.guiLogMessageBuffer = new ConcurrentLinkedQueue<String>(); <add> sInstance.mMainThreadHandler = new AddToBufferOnMain(new WeakReference<LogMessageReceiver>(sInstance)); <add> AppGlobals.guiLogMessageBuffer = new ConcurrentLinkedQueue<String>(); <ide> } <ide> <ide> LogMessageReceiver(Context context) { <ide> LocalBroadcastManager.getInstance(context).registerReceiver(this, <del> new 
IntentFilter(SharedConstants.ACTION_GUI_LOG_MESSAGE)); <add> new IntentFilter(AppGlobals.ACTION_GUI_LOG_MESSAGE)); <ide> <ide> final int kMillis = 1000 * 3; <ide> mFlushMessagesTimer.scheduleAtFixedRate(new TimerTask() { <ide> <ide> @Override <ide> public void onReceive(Context c, Intent intent) { <del> String s = intent.getStringExtra(SharedConstants.ACTION_GUI_LOG_MESSAGE_EXTRA); <add> String s = intent.getStringExtra(AppGlobals.ACTION_GUI_LOG_MESSAGE_EXTRA); <ide> addMessageToBuffer(s); <ide> } <ide> }
Java
apache-2.0
36003385f3fb321a52af6533374fa7b3489b1ec7
0
rborer/google-cloud-java,mbrukman/gcloud-java,FirebasePrivate/google-cloud-java,tangiel/google-cloud-java,mbrukman/gcloud-java,omaray/google-cloud-java,omaray/google-cloud-java,omaray/google-cloud-java,shinfan/gcloud-java,FirebasePrivate/google-cloud-java,rborer/google-cloud-java,vam-google/google-cloud-java,FirebasePrivate/google-cloud-java,vam-google/google-cloud-java,shinfan/gcloud-java,aozarov/gcloud-java,tangiel/google-cloud-java,tangiel/google-cloud-java,FirebasePrivate/google-cloud-java,aozarov/gcloud-java,jabubake/google-cloud-java,aozarov/gcloud-java,tangiel/google-cloud-java,vam-google/google-cloud-java,mbrukman/gcloud-java,omaray/google-cloud-java,rborer/google-cloud-java,vam-google/google-cloud-java,jabubake/google-cloud-java,jabubake/google-cloud-java,vam-google/google-cloud-java,rborer/google-cloud-java,vam-google/google-cloud-java,tangiel/google-cloud-java,shinfan/gcloud-java,omaray/google-cloud-java,jabubake/google-cloud-java,rborer/google-cloud-java,shinfan/gcloud-java,shinfan/gcloud-java,aozarov/gcloud-java,FirebasePrivate/google-cloud-java,mbrukman/gcloud-java,jabubake/google-cloud-java
/* * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.gcloud.storage; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.gcloud.storage.Bucket.BucketSourceOption.toGetOptions; import static com.google.gcloud.storage.Bucket.BucketSourceOption.toSourceOptions; import com.google.common.base.Function; import com.google.common.base.MoreObjects; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.gcloud.Page; import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.storage.Storage.BlobGetOption; import com.google.gcloud.storage.Storage.BucketTargetOption; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.Set; /** * A Google cloud storage bucket. * * <p>Objects of this class are immutable. Operations that modify the bucket like {@link #update} * return a new object. To get a {@code Bucket} object with the most recent information use * {@link #reload}. {@code Bucket} adds a layer of service-related functionality over * {@link BucketInfo}. 
* </p> */ public final class Bucket extends BucketInfo { private static final long serialVersionUID = 8574601739542252586L; private final StorageOptions options; private transient Storage storage; /** * Class for specifying bucket source options when {@code Bucket} methods are used. */ public static class BucketSourceOption extends Option { private static final long serialVersionUID = 6928872234155522371L; private BucketSourceOption(StorageRpc.Option rpcOption) { super(rpcOption, null); } private Storage.BucketSourceOption toSourceOption(BucketInfo bucketInfo) { switch (rpcOption()) { case IF_METAGENERATION_MATCH: return Storage.BucketSourceOption.metagenerationMatch(bucketInfo.metageneration()); case IF_METAGENERATION_NOT_MATCH: return Storage.BucketSourceOption.metagenerationNotMatch(bucketInfo.metageneration()); default: throw new AssertionError("Unexpected enum value"); } } private Storage.BucketGetOption toGetOption(BucketInfo bucketInfo) { switch (rpcOption()) { case IF_METAGENERATION_MATCH: return Storage.BucketGetOption.metagenerationMatch(bucketInfo.metageneration()); case IF_METAGENERATION_NOT_MATCH: return Storage.BucketGetOption.metagenerationNotMatch(bucketInfo.metageneration()); default: throw new AssertionError("Unexpected enum value"); } } /** * Returns an option for bucket's metageneration match. If this option is used the request will * fail if metageneration does not match. */ public static BucketSourceOption metagenerationMatch() { return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_MATCH); } /** * Returns an option for bucket's metageneration mismatch. If this option is used the request * will fail if metageneration matches. */ public static BucketSourceOption metagenerationNotMatch() { return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); } static Storage.BucketSourceOption[] toSourceOptions(BucketInfo bucketInfo, BucketSourceOption... 
options) { Storage.BucketSourceOption[] convertedOptions = new Storage.BucketSourceOption[options.length]; int index = 0; for (BucketSourceOption option : options) { convertedOptions[index++] = option.toSourceOption(bucketInfo); } return convertedOptions; } static Storage.BucketGetOption[] toGetOptions(BucketInfo bucketInfo, BucketSourceOption... options) { Storage.BucketGetOption[] convertedOptions = new Storage.BucketGetOption[options.length]; int index = 0; for (BucketSourceOption option : options) { convertedOptions[index++] = option.toGetOption(bucketInfo); } return convertedOptions; } } /** * Class for specifying blob target options when {@code Bucket} methods are used. */ public static class BlobTargetOption extends Option { private static final Function<BlobTargetOption, StorageRpc.Option> TO_ENUM = new Function<BlobTargetOption, StorageRpc.Option>() { @Override public StorageRpc.Option apply(BlobTargetOption blobTargetOption) { return blobTargetOption.rpcOption(); } }; private static final long serialVersionUID = 8345296337342509425L; private BlobTargetOption(StorageRpc.Option rpcOption, Object value) { super(rpcOption, value); } private StorageRpc.Tuple<BlobInfo, Storage.BlobTargetOption> toTargetOption(BlobInfo blobInfo) { BlobId blobId = blobInfo.blobId(); switch (rpcOption()) { case PREDEFINED_ACL: return StorageRpc.Tuple.of(blobInfo, Storage.BlobTargetOption.predefinedAcl((Storage.PredefinedAcl) value())); case IF_GENERATION_MATCH: blobId = BlobId.of(blobId.bucket(), blobId.name(), (Long) value()); return StorageRpc.Tuple.of(blobInfo.toBuilder().blobId(blobId).build(), Storage.BlobTargetOption.generationMatch()); case IF_GENERATION_NOT_MATCH: blobId = BlobId.of(blobId.bucket(), blobId.name(), (Long) value()); return StorageRpc.Tuple.of(blobInfo.toBuilder().blobId(blobId).build(), Storage.BlobTargetOption.generationNotMatch()); case IF_METAGENERATION_MATCH: return StorageRpc.Tuple.of(blobInfo.toBuilder().metageneration((Long) value()).build(), 
Storage.BlobTargetOption.metagenerationMatch()); case IF_METAGENERATION_NOT_MATCH: return StorageRpc.Tuple.of(blobInfo.toBuilder().metageneration((Long) value()).build(), Storage.BlobTargetOption.metagenerationNotMatch()); default: throw new AssertionError("Unexpected enum value"); } } /** * Returns an option for specifying blob's predefined ACL configuration. */ public static BlobTargetOption predefinedAcl(Storage.PredefinedAcl acl) { return new BlobTargetOption(StorageRpc.Option.PREDEFINED_ACL, acl); } /** * Returns an option that causes an operation to succeed only if the target blob does not exist. * This option can not be provided together with {@link #generationMatch(long)} or * {@link #generationNotMatch(long)}. */ public static BlobTargetOption doesNotExist() { return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_MATCH, 0L); } /** * Returns an option for blob's data generation match. If this option is used the request will * fail if generation does not match the provided value. This option can not be provided * together with {@link #generationNotMatch(long)} or {@link #doesNotExist()}. */ public static BlobTargetOption generationMatch(long generation) { return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_MATCH, generation); } /** * Returns an option for blob's data generation mismatch. If this option is used the request * will fail if blob's generation matches the provided value. This option can not be provided * together with {@link #generationMatch(long)} or {@link #doesNotExist()}. */ public static BlobTargetOption generationNotMatch(long generation) { return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, generation); } /** * Returns an option for blob's metageneration match. If this option is used the request will * fail if metageneration does not match the provided value. This option can not be provided * together with {@link #metagenerationNotMatch(long)}. 
*/ public static BlobTargetOption metagenerationMatch(long metageneration) { return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_MATCH, metageneration); } /** * Returns an option for blob's metageneration mismatch. If this option is used the request will * fail if metageneration matches the provided value. This option can not be provided together * with {@link #metagenerationMatch(long)}. */ public static BlobTargetOption metagenerationNotMatch(long metageneration) { return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH, metageneration); } static StorageRpc.Tuple<BlobInfo, Storage.BlobTargetOption[]> toTargetOptions( BlobInfo info, BlobTargetOption... options) { Set<StorageRpc.Option> optionSet = Sets.immutableEnumSet(Lists.transform(Arrays.asList(options), TO_ENUM)); checkArgument(!(optionSet.contains(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH) && optionSet.contains(StorageRpc.Option.IF_METAGENERATION_MATCH)), "metagenerationMatch and metagenerationNotMatch options can not be both provided"); checkArgument(!(optionSet.contains(StorageRpc.Option.IF_GENERATION_NOT_MATCH) && optionSet.contains(StorageRpc.Option.IF_GENERATION_MATCH)), "Only one option of generationMatch, doesNotExist or generationNotMatch can be provided"); Storage.BlobTargetOption[] convertedOptions = new Storage.BlobTargetOption[options.length]; BlobInfo targetInfo = info; int index = 0; for (BlobTargetOption option : options) { StorageRpc.Tuple<BlobInfo, Storage.BlobTargetOption> target = option.toTargetOption(targetInfo); targetInfo = target.x(); convertedOptions[index++] = target.y(); } return StorageRpc.Tuple.of(targetInfo, convertedOptions); } } /** * Class for specifying blob write options when {@code Bucket} methods are used. 
*/ public static class BlobWriteOption implements Serializable { private static final Function<BlobWriteOption, Storage.BlobWriteOption.Option> TO_ENUM = new Function<BlobWriteOption, Storage.BlobWriteOption.Option>() { @Override public Storage.BlobWriteOption.Option apply(BlobWriteOption blobWriteOption) { return blobWriteOption.option; } }; private static final long serialVersionUID = 4722190734541993114L; private final Storage.BlobWriteOption.Option option; private final Object value; private StorageRpc.Tuple<BlobInfo, Storage.BlobWriteOption> toWriteOption(BlobInfo blobInfo) { BlobId blobId = blobInfo.blobId(); switch (option) { case PREDEFINED_ACL: return StorageRpc.Tuple.of(blobInfo, Storage.BlobWriteOption.predefinedAcl((Storage.PredefinedAcl) value)); case IF_GENERATION_MATCH: blobId = BlobId.of(blobId.bucket(), blobId.name(), (Long) value); return StorageRpc.Tuple.of(blobInfo.toBuilder().blobId(blobId).build(), Storage.BlobWriteOption.generationMatch()); case IF_GENERATION_NOT_MATCH: blobId = BlobId.of(blobId.bucket(), blobId.name(), (Long) value); return StorageRpc.Tuple.of(blobInfo.toBuilder().blobId(blobId).build(), Storage.BlobWriteOption.generationNotMatch()); case IF_METAGENERATION_MATCH: return StorageRpc.Tuple.of(blobInfo.toBuilder().metageneration((Long) value).build(), Storage.BlobWriteOption.metagenerationMatch()); case IF_METAGENERATION_NOT_MATCH: return StorageRpc.Tuple.of(blobInfo.toBuilder().metageneration((Long) value).build(), Storage.BlobWriteOption.metagenerationNotMatch()); case IF_MD5_MATCH: return StorageRpc.Tuple.of(blobInfo.toBuilder().md5((String) value).build(), Storage.BlobWriteOption.md5Match()); case IF_CRC32C_MATCH: return StorageRpc.Tuple.of(blobInfo.toBuilder().crc32c((String) value).build(), Storage.BlobWriteOption.crc32cMatch()); default: throw new AssertionError("Unexpected enum value"); } } private BlobWriteOption(Storage.BlobWriteOption.Option option, Object value) { this.option = option; this.value = value; } @Override 
public int hashCode() { return Objects.hash(option, value); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (!(obj instanceof BlobWriteOption)) { return false; } final BlobWriteOption other = (BlobWriteOption) obj; return this.option == other.option && Objects.equals(this.value, other.value); } /** * Returns an option for specifying blob's predefined ACL configuration. */ public static BlobWriteOption predefinedAcl(Storage.PredefinedAcl acl) { return new BlobWriteOption(Storage.BlobWriteOption.Option.PREDEFINED_ACL, acl); } /** * Returns an option that causes an operation to succeed only if the target blob does not exist. * This option can not be provided together with {@link #generationMatch(long)} or * {@link #generationNotMatch(long)}. */ public static BlobWriteOption doesNotExist() { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_GENERATION_MATCH, 0L); } /** * Returns an option for blob's data generation match. If this option is used the request will * fail if generation does not match the provided value. This option can not be provided * together with {@link #generationNotMatch(long)} or {@link #doesNotExist()}. */ public static BlobWriteOption generationMatch(long generation) { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_GENERATION_MATCH, generation); } /** * Returns an option for blob's data generation mismatch. If this option is used the request * will fail if generation matches the provided value. This option can not be provided * together with {@link #generationMatch(long)} or {@link #doesNotExist()}. */ public static BlobWriteOption generationNotMatch(long generation) { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_GENERATION_NOT_MATCH, generation); } /** * Returns an option for blob's metageneration match. If this option is used the request will * fail if metageneration does not match the provided value. 
This option can not be provided * together with {@link #metagenerationNotMatch(long)}. */ public static BlobWriteOption metagenerationMatch(long metageneration) { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_METAGENERATION_MATCH, metageneration); } /** * Returns an option for blob's metageneration mismatch. If this option is used the request will * fail if metageneration matches the provided value. This option can not be provided together * with {@link #metagenerationMatch(long)}. */ public static BlobWriteOption metagenerationNotMatch(long metageneration) { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_METAGENERATION_NOT_MATCH, metageneration); } /** * Returns an option for blob's data MD5 hash match. If this option is used the request will * fail if blobs' data MD5 hash does not match the provided value. */ public static BlobWriteOption md5Match(String md5) { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_MD5_MATCH, md5); } /** * Returns an option for blob's data CRC32C checksum match. If this option is used the request * will fail if blobs' data CRC32C checksum does not match the provided value. */ public static BlobWriteOption crc32cMatch(String crc32c) { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_CRC32C_MATCH, crc32c); } static StorageRpc.Tuple<BlobInfo, Storage.BlobWriteOption[]> toWriteOptions( BlobInfo info, BlobWriteOption... 
options) { Set<Storage.BlobWriteOption.Option> optionSet = Sets.immutableEnumSet(Lists.transform(Arrays.asList(options), TO_ENUM)); checkArgument(!(optionSet.contains(Storage.BlobWriteOption.Option.IF_METAGENERATION_NOT_MATCH) && optionSet.contains(Storage.BlobWriteOption.Option.IF_METAGENERATION_MATCH)), "metagenerationMatch and metagenerationNotMatch options can not be both provided"); checkArgument(!(optionSet.contains(Storage.BlobWriteOption.Option.IF_GENERATION_NOT_MATCH) && optionSet.contains(Storage.BlobWriteOption.Option.IF_GENERATION_MATCH)), "Only one option of generationMatch, doesNotExist or generationNotMatch can be provided"); Storage.BlobWriteOption[] convertedOptions = new Storage.BlobWriteOption[options.length]; BlobInfo writeInfo = info; int index = 0; for (BlobWriteOption option : options) { StorageRpc.Tuple<BlobInfo, Storage.BlobWriteOption> write = option.toWriteOption(writeInfo); writeInfo = write.x(); convertedOptions[index++] = write.y(); } return StorageRpc.Tuple.of(writeInfo, convertedOptions); } } /** * Builder for {@code Bucket}. 
*/ public static class Builder extends BucketInfo.Builder { private final Storage storage; private final BucketInfo.BuilderImpl infoBuilder; Builder(Bucket bucket) { this.storage = bucket.storage; this.infoBuilder = new BucketInfo.BuilderImpl(bucket); } @Override public Builder name(String name) { infoBuilder.name(name); return this; } @Override Builder id(String id) { infoBuilder.id(id); return this; } @Override Builder owner(Acl.Entity owner) { infoBuilder.owner(owner); return this; } @Override Builder selfLink(String selfLink) { infoBuilder.selfLink(selfLink); return this; } @Override public Builder versioningEnabled(Boolean enable) { infoBuilder.versioningEnabled(enable); return this; } @Override public Builder indexPage(String indexPage) { infoBuilder.indexPage(indexPage); return this; } @Override public Builder notFoundPage(String notFoundPage) { infoBuilder.notFoundPage(notFoundPage); return this; } @Override public Builder deleteRules(Iterable<? extends DeleteRule> rules) { infoBuilder.deleteRules(rules); return this; } @Override public Builder storageClass(String storageClass) { infoBuilder.storageClass(storageClass); return this; } @Override public Builder location(String location) { infoBuilder.location(location); return this; } @Override Builder etag(String etag) { infoBuilder.etag(etag); return this; } @Override Builder createTime(Long createTime) { infoBuilder.createTime(createTime); return this; } @Override Builder metageneration(Long metageneration) { infoBuilder.metageneration(metageneration); return this; } @Override public Builder cors(Iterable<Cors> cors) { infoBuilder.cors(cors); return this; } @Override public Builder acl(Iterable<Acl> acl) { infoBuilder.acl(acl); return this; } @Override public Builder defaultAcl(Iterable<Acl> acl) { infoBuilder.defaultAcl(acl); return this; } @Override public Bucket build() { return new Bucket(storage, infoBuilder); } } Bucket(Storage storage, BucketInfo.BuilderImpl infoBuilder) { super(infoBuilder); 
this.storage = checkNotNull(storage); this.options = storage.options(); } /** * Checks if this bucket exists. * * @return true if this bucket exists, false otherwise * @throws StorageException upon failure */ public boolean exists(BucketSourceOption... options) { int length = options.length; Storage.BucketGetOption[] getOptions = Arrays.copyOf(toGetOptions(this, options), length + 1); getOptions[length] = Storage.BucketGetOption.fields(); return storage.get(name(), getOptions) != null; } /** * Fetches current bucket's latest information. Returns {@code null} if the bucket does not exist. * * @param options bucket read options * @return a {@code Bucket} object with latest information or {@code null} if not found * @throws StorageException upon failure */ public Bucket reload(BucketSourceOption... options) { return storage.get(name(), toGetOptions(this, options)); } /** * Updates the bucket's information. Bucket's name cannot be changed. A new {@code Bucket} object * is returned. By default no checks are made on the metadata generation of the current bucket. * If you want to update the information only if the current bucket metadata are at their latest * version use the {@code metagenerationMatch} option: * {@code bucket.update(BucketTargetOption.metagenerationMatch())} * * @param options update options * @return a {@code Bucket} object with updated information * @throws StorageException upon failure */ public Bucket update(BucketTargetOption... options) { return storage.update(this, options); } /** * Deletes this bucket. * * @param options bucket delete options * @return {@code true} if bucket was deleted, {@code false} if it was not found * @throws StorageException upon failure */ public boolean delete(BucketSourceOption... options) { return storage.delete(name(), toSourceOptions(this, options)); } /** * Returns the paginated list of {@code Blob} in this bucket. 
* * @param options options for listing blobs * @throws StorageException upon failure */ public Page<Blob> list(Storage.BlobListOption... options) { return storage.list(name(), options); } /** * Returns the requested blob in this bucket or {@code null} if not found. * * @param blob name of the requested blob * @param options blob search options * @throws StorageException upon failure */ public Blob get(String blob, BlobGetOption... options) { return storage.get(BlobId.of(name(), blob), options); } /** * Returns a list of requested blobs in this bucket. Blobs that do not exist are null. * * @param blobName1 first blob to get * @param blobName2 second blob to get * @param blobNames other blobs to get * @return an immutable list of {@code Blob} objects * @throws StorageException upon failure */ public List<Blob> get(String blobName1, String blobName2, String... blobNames) { BatchRequest.Builder batch = BatchRequest.builder(); batch.get(name(), blobName1); batch.get(name(), blobName2); for (String name : blobNames) { batch.get(name(), name); } List<Blob> blobs = new ArrayList<>(blobNames.length); BatchResponse response = storage.submit(batch.build()); for (BatchResponse.Result<Blob> result : response.gets()) { BlobInfo blobInfo = result.get(); blobs.add(blobInfo != null ? new Blob(storage, new BlobInfo.BuilderImpl(blobInfo)) : null); } return Collections.unmodifiableList(blobs); } /** * Creates a new blob in this bucket. Direct upload is used to upload {@code content}. * For large content, {@link Blob#writer(com.google.gcloud.storage.Storage.BlobWriteOption...)} * is recommended as it uses resumable upload. MD5 and CRC32C hashes of {@code content} are * computed and used for validating transferred data. * * @param blob a blob name * @param content the blob content * @param contentType the blob content type. If {@code null} then * {@value com.google.gcloud.storage.Storage#DEFAULT_CONTENT_TYPE} is used. 
* @param options options for blob creation * @return a complete blob information * @throws StorageException upon failure */ public Blob create(String blob, byte[] content, String contentType, BlobTargetOption... options) { BlobInfo blobInfo = BlobInfo.builder(BlobId.of(name(), blob)) .contentType(MoreObjects.firstNonNull(contentType, Storage.DEFAULT_CONTENT_TYPE)).build(); StorageRpc.Tuple<BlobInfo, Storage.BlobTargetOption[]> target = BlobTargetOption.toTargetOptions(blobInfo, options); return storage.create(target.x(), content, target.y()); } /** * Creates a new blob in this bucket. Direct upload is used to upload {@code content}. * For large content, {@link Blob#writer(com.google.gcloud.storage.Storage.BlobWriteOption...)} * is recommended as it uses resumable upload. * * @param blob a blob name * @param content the blob content as a stream * @param contentType the blob content type. If {@code null} then * {@value com.google.gcloud.storage.Storage#DEFAULT_CONTENT_TYPE} is used. * @param options options for blob creation * @return a complete blob information * @throws StorageException upon failure */ public Blob create(String blob, InputStream content, String contentType, BlobWriteOption... options) { BlobInfo blobInfo = BlobInfo.builder(BlobId.of(name(), blob)) .contentType(MoreObjects.firstNonNull(contentType, Storage.DEFAULT_CONTENT_TYPE)).build(); StorageRpc.Tuple<BlobInfo, Storage.BlobWriteOption[]> write = BlobWriteOption.toWriteOptions(blobInfo, options); return storage.create(write.x(), content, write.y()); } /** * Returns the bucket's {@code Storage} object used to issue requests. 
*/ public Storage storage() { return storage; } @Override public Builder toBuilder() { return new Builder(this); } @Override public boolean equals(Object obj) { return obj instanceof Bucket && Objects.equals(toPb(), ((Bucket) obj).toPb()) && Objects.equals(options, ((Bucket) obj).options); } @Override public int hashCode() { return Objects.hash(super.hashCode(), options); } private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); this.storage = options.service(); } static Bucket fromPb(Storage storage, com.google.api.services.storage.model.Bucket bucketPb) { return new Bucket(storage, new BucketInfo.BuilderImpl(BucketInfo.fromPb(bucketPb))); } }
gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java
/* * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.gcloud.storage; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.gcloud.storage.Bucket.BucketSourceOption.toGetOptions; import static com.google.gcloud.storage.Bucket.BucketSourceOption.toSourceOptions; import com.google.common.base.Function; import com.google.common.base.MoreObjects; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.gcloud.Page; import com.google.gcloud.spi.StorageRpc; import com.google.gcloud.storage.Storage.BlobGetOption; import com.google.gcloud.storage.Storage.BucketTargetOption; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.Set; /** * A Google cloud storage bucket. * * <p>Objects of this class are immutable. Operations that modify the bucket like {@link #update} * return a new object. To get a {@code Bucket} object with the most recent information use * {@link #reload}. {@code Bucket} adds a layer of service-related functionality over * {@link BucketInfo}. 
* </p> */ public final class Bucket extends BucketInfo { private static final long serialVersionUID = 8574601739542252586L; private final StorageOptions options; private transient Storage storage; /** * Class for specifying bucket source options when {@code Bucket} methods are used. */ public static class BucketSourceOption extends Option { private static final long serialVersionUID = 6928872234155522371L; private BucketSourceOption(StorageRpc.Option rpcOption) { super(rpcOption, null); } private Storage.BucketSourceOption toSourceOption(BucketInfo bucketInfo) { switch (rpcOption()) { case IF_METAGENERATION_MATCH: return Storage.BucketSourceOption.metagenerationMatch(bucketInfo.metageneration()); case IF_METAGENERATION_NOT_MATCH: return Storage.BucketSourceOption.metagenerationNotMatch(bucketInfo.metageneration()); default: throw new AssertionError("Unexpected enum value"); } } private Storage.BucketGetOption toGetOption(BucketInfo bucketInfo) { switch (rpcOption()) { case IF_METAGENERATION_MATCH: return Storage.BucketGetOption.metagenerationMatch(bucketInfo.metageneration()); case IF_METAGENERATION_NOT_MATCH: return Storage.BucketGetOption.metagenerationNotMatch(bucketInfo.metageneration()); default: throw new AssertionError("Unexpected enum value"); } } /** * Returns an option for bucket's metageneration match. If this option is used the request will * fail if metageneration does not match. */ public static BucketSourceOption metagenerationMatch() { return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_MATCH); } /** * Returns an option for bucket's metageneration mismatch. If this option is used the request * will fail if metageneration matches. */ public static BucketSourceOption metagenerationNotMatch() { return new BucketSourceOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH); } static Storage.BucketSourceOption[] toSourceOptions(BucketInfo bucketInfo, BucketSourceOption... 
options) { Storage.BucketSourceOption[] convertedOptions = new Storage.BucketSourceOption[options.length]; int index = 0; for (BucketSourceOption option : options) { convertedOptions[index++] = option.toSourceOption(bucketInfo); } return convertedOptions; } static Storage.BucketGetOption[] toGetOptions(BucketInfo bucketInfo, BucketSourceOption... options) { Storage.BucketGetOption[] convertedOptions = new Storage.BucketGetOption[options.length]; int index = 0; for (BucketSourceOption option : options) { convertedOptions[index++] = option.toGetOption(bucketInfo); } return convertedOptions; } } /** * Class for specifying blob target options when {@code Bucket} methods are used. */ public static class BlobTargetOption extends Option { private static final Function<BlobTargetOption, StorageRpc.Option> TO_ENUM = new Function<BlobTargetOption, StorageRpc.Option>() { @Override public StorageRpc.Option apply(BlobTargetOption blobTargetOption) { return blobTargetOption.rpcOption(); } }; private static final long serialVersionUID = 8345296337342509425L; private BlobTargetOption(StorageRpc.Option rpcOption, Object value) { super(rpcOption, value); } private StorageRpc.Tuple<BlobInfo, Storage.BlobTargetOption> toTargetOption(BlobInfo blobInfo) { BlobId blobId = blobInfo.blobId(); switch (rpcOption()) { case PREDEFINED_ACL: return StorageRpc.Tuple.of(blobInfo, Storage.BlobTargetOption.predefinedAcl((Storage.PredefinedAcl) value())); case IF_GENERATION_MATCH: blobId = BlobId.of(blobId.bucket(), blobId.name(), (Long) value()); return StorageRpc.Tuple.of(blobInfo.toBuilder().blobId(blobId).build(), Storage.BlobTargetOption.generationMatch()); case IF_GENERATION_NOT_MATCH: blobId = BlobId.of(blobId.bucket(), blobId.name(), (Long) value()); return StorageRpc.Tuple.of(blobInfo.toBuilder().blobId(blobId).build(), Storage.BlobTargetOption.generationNotMatch()); case IF_METAGENERATION_MATCH: return StorageRpc.Tuple.of(blobInfo.toBuilder().metageneration((Long) value()).build(), 
Storage.BlobTargetOption.metagenerationMatch()); case IF_METAGENERATION_NOT_MATCH: return StorageRpc.Tuple.of(blobInfo.toBuilder().metageneration((Long) value()).build(), Storage.BlobTargetOption.metagenerationNotMatch()); default: throw new AssertionError("Unexpected enum value"); } } /** * Returns an option for specifying blob's predefined ACL configuration. */ public static BlobTargetOption predefinedAcl(Storage.PredefinedAcl acl) { return new BlobTargetOption(StorageRpc.Option.PREDEFINED_ACL, acl); } /** * Returns an option that causes an operation to succeed only if the target blob does not exist. * This option can not be provided together with {@link #generationMatch(long)} or * {@link #generationNotMatch(long)}. */ public static BlobTargetOption doesNotExist() { return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_MATCH, 0L); } /** * Returns an option for blob's data generation match. If this option is used the request will * fail if generation does not match the provided value. This option can not be provided * together with {@link #generationNotMatch(long)} or {@link #doesNotExist()}. */ public static BlobTargetOption generationMatch(long generation) { return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_MATCH, generation); } /** * Returns an option for blob's data generation mismatch. If this option is used the request * will fail if blob's generation matches the provided value. This option can not be provided * together with {@link #generationMatch(long)} or {@link #doesNotExist()}. */ public static BlobTargetOption generationNotMatch(long generation) { return new BlobTargetOption(StorageRpc.Option.IF_GENERATION_NOT_MATCH, generation); } /** * Returns an option for blob's metageneration match. If this option is used the request will * fail if metageneration does not match the provided value. Either this option or * {@link #metagenerationNotMatch(long)} can be provided at the same time. 
*/ public static BlobTargetOption metagenerationMatch(long metageneration) { return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_MATCH, metageneration); } /** * Returns an option for blob's metageneration mismatch. If this option is used the request will * fail if metageneration matches the provided value. Either this option or * {@link #metagenerationMatch(long)} can be provided at the same time. */ public static BlobTargetOption metagenerationNotMatch(long metageneration) { return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH, metageneration); } static StorageRpc.Tuple<BlobInfo, Storage.BlobTargetOption[]> toTargetOptions( BlobInfo info, BlobTargetOption... options) { Set<StorageRpc.Option> optionSet = Sets.immutableEnumSet(Lists.transform(Arrays.asList(options), TO_ENUM)); checkArgument(!(optionSet.contains(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH) && optionSet.contains(StorageRpc.Option.IF_METAGENERATION_MATCH)), "metagenerationMatch and metagenerationNotMatch options can not be both provided"); checkArgument(!(optionSet.contains(StorageRpc.Option.IF_GENERATION_NOT_MATCH) && optionSet.contains(StorageRpc.Option.IF_GENERATION_MATCH)), "Only one option of generationMatch, doesNotExist or generationNotMatch can be provided"); Storage.BlobTargetOption[] convertedOptions = new Storage.BlobTargetOption[options.length]; BlobInfo targetInfo = info; int index = 0; for (BlobTargetOption option : options) { StorageRpc.Tuple<BlobInfo, Storage.BlobTargetOption> target = option.toTargetOption(targetInfo); targetInfo = target.x(); convertedOptions[index++] = target.y(); } return StorageRpc.Tuple.of(targetInfo, convertedOptions); } } /** * Class for specifying blob write options when {@code Bucket} methods are used. 
*/ public static class BlobWriteOption implements Serializable { private static final Function<BlobWriteOption, Storage.BlobWriteOption.Option> TO_ENUM = new Function<BlobWriteOption, Storage.BlobWriteOption.Option>() { @Override public Storage.BlobWriteOption.Option apply(BlobWriteOption blobWriteOption) { return blobWriteOption.option; } }; private static final long serialVersionUID = 4722190734541993114L; private final Storage.BlobWriteOption.Option option; private final Object value; private StorageRpc.Tuple<BlobInfo, Storage.BlobWriteOption> toWriteOption(BlobInfo blobInfo) { BlobId blobId = blobInfo.blobId(); switch (option) { case PREDEFINED_ACL: return StorageRpc.Tuple.of(blobInfo, Storage.BlobWriteOption.predefinedAcl((Storage.PredefinedAcl) value)); case IF_GENERATION_MATCH: blobId = BlobId.of(blobId.bucket(), blobId.name(), (Long) value); return StorageRpc.Tuple.of(blobInfo.toBuilder().blobId(blobId).build(), Storage.BlobWriteOption.generationMatch()); case IF_GENERATION_NOT_MATCH: blobId = BlobId.of(blobId.bucket(), blobId.name(), (Long) value); return StorageRpc.Tuple.of(blobInfo.toBuilder().blobId(blobId).build(), Storage.BlobWriteOption.generationNotMatch()); case IF_METAGENERATION_MATCH: return StorageRpc.Tuple.of(blobInfo.toBuilder().metageneration((Long) value).build(), Storage.BlobWriteOption.metagenerationMatch()); case IF_METAGENERATION_NOT_MATCH: return StorageRpc.Tuple.of(blobInfo.toBuilder().metageneration((Long) value).build(), Storage.BlobWriteOption.metagenerationNotMatch()); case IF_MD5_MATCH: return StorageRpc.Tuple.of(blobInfo.toBuilder().md5((String) value).build(), Storage.BlobWriteOption.md5Match()); case IF_CRC32C_MATCH: return StorageRpc.Tuple.of(blobInfo.toBuilder().crc32c((String) value).build(), Storage.BlobWriteOption.crc32cMatch()); default: throw new AssertionError("Unexpected enum value"); } } private BlobWriteOption(Storage.BlobWriteOption.Option option, Object value) { this.option = option; this.value = value; } @Override 
public int hashCode() { return Objects.hash(option, value); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (!(obj instanceof BlobWriteOption)) { return false; } final BlobWriteOption other = (BlobWriteOption) obj; return this.option == other.option && Objects.equals(this.value, other.value); } /** * Returns an option for specifying blob's predefined ACL configuration. */ public static BlobWriteOption predefinedAcl(Storage.PredefinedAcl acl) { return new BlobWriteOption(Storage.BlobWriteOption.Option.PREDEFINED_ACL, acl); } /** * Returns an option that causes an operation to succeed only if the target blob does not exist. * This option can not be provided together with {@link #generationMatch(long)} or * {@link #generationNotMatch(long)}. */ public static BlobWriteOption doesNotExist() { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_GENERATION_MATCH, 0L); } /** * Returns an option for blob's data generation match. If this option is used the request will * fail if generation does not match the provided value. This option can not be provided * together with {@link #generationNotMatch(long)} or {@link #doesNotExist()}. */ public static BlobWriteOption generationMatch(long generation) { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_GENERATION_MATCH, generation); } /** * Returns an option for blob's data generation mismatch. If this option is used the request * will fail if generation matches the provided value. This option can not be provided * together with {@link #generationMatch(long)} or {@link #doesNotExist()}. */ public static BlobWriteOption generationNotMatch(long generation) { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_GENERATION_NOT_MATCH, generation); } /** * Returns an option for blob's metageneration match. If this option is used the request will * fail if metageneration does not match the provided value. 
Either this option or * {@link #metagenerationNotMatch(long)} can be provided at the same time. */ public static BlobWriteOption metagenerationMatch(long metageneration) { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_METAGENERATION_MATCH, metageneration); } /** * Returns an option for blob's metageneration mismatch. If this option is used the request will * fail if metageneration matches the provided value. Either this option or * {@link #metagenerationMatch(long)} can be provided at the same time. */ public static BlobWriteOption metagenerationNotMatch(long metageneration) { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_METAGENERATION_NOT_MATCH, metageneration); } /** * Returns an option for blob's data MD5 hash match. If this option is used the request will * fail if blobs' data MD5 hash does not match the provided value. */ public static BlobWriteOption md5Match(String md5) { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_MD5_MATCH, md5); } /** * Returns an option for blob's data CRC32C checksum match. If this option is used the request * will fail if blobs' data CRC32C checksum does not match the provided value. */ public static BlobWriteOption crc32cMatch(String crc32c) { return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_CRC32C_MATCH, crc32c); } static StorageRpc.Tuple<BlobInfo, Storage.BlobWriteOption[]> toWriteOptions( BlobInfo info, BlobWriteOption... 
options) { Set<Storage.BlobWriteOption.Option> optionSet = Sets.immutableEnumSet(Lists.transform(Arrays.asList(options), TO_ENUM)); checkArgument(!(optionSet.contains(Storage.BlobWriteOption.Option.IF_METAGENERATION_NOT_MATCH) && optionSet.contains(Storage.BlobWriteOption.Option.IF_METAGENERATION_MATCH)), "metagenerationMatch and metagenerationNotMatch options can not be both provided"); checkArgument(!(optionSet.contains(Storage.BlobWriteOption.Option.IF_GENERATION_NOT_MATCH) && optionSet.contains(Storage.BlobWriteOption.Option.IF_GENERATION_MATCH)), "Only one option of generationMatch, doesNotExist or generationNotMatch can be provided"); Storage.BlobWriteOption[] convertedOptions = new Storage.BlobWriteOption[options.length]; BlobInfo writeInfo = info; int index = 0; for (BlobWriteOption option : options) { StorageRpc.Tuple<BlobInfo, Storage.BlobWriteOption> write = option.toWriteOption(writeInfo); writeInfo = write.x(); convertedOptions[index++] = write.y(); } return StorageRpc.Tuple.of(writeInfo, convertedOptions); } } /** * Builder for {@code Bucket}. 
*/ public static class Builder extends BucketInfo.Builder { private final Storage storage; private final BucketInfo.BuilderImpl infoBuilder; Builder(Bucket bucket) { this.storage = bucket.storage; this.infoBuilder = new BucketInfo.BuilderImpl(bucket); } @Override public Builder name(String name) { infoBuilder.name(name); return this; } @Override Builder id(String id) { infoBuilder.id(id); return this; } @Override Builder owner(Acl.Entity owner) { infoBuilder.owner(owner); return this; } @Override Builder selfLink(String selfLink) { infoBuilder.selfLink(selfLink); return this; } @Override public Builder versioningEnabled(Boolean enable) { infoBuilder.versioningEnabled(enable); return this; } @Override public Builder indexPage(String indexPage) { infoBuilder.indexPage(indexPage); return this; } @Override public Builder notFoundPage(String notFoundPage) { infoBuilder.notFoundPage(notFoundPage); return this; } @Override public Builder deleteRules(Iterable<? extends DeleteRule> rules) { infoBuilder.deleteRules(rules); return this; } @Override public Builder storageClass(String storageClass) { infoBuilder.storageClass(storageClass); return this; } @Override public Builder location(String location) { infoBuilder.location(location); return this; } @Override Builder etag(String etag) { infoBuilder.etag(etag); return this; } @Override Builder createTime(Long createTime) { infoBuilder.createTime(createTime); return this; } @Override Builder metageneration(Long metageneration) { infoBuilder.metageneration(metageneration); return this; } @Override public Builder cors(Iterable<Cors> cors) { infoBuilder.cors(cors); return this; } @Override public Builder acl(Iterable<Acl> acl) { infoBuilder.acl(acl); return this; } @Override public Builder defaultAcl(Iterable<Acl> acl) { infoBuilder.defaultAcl(acl); return this; } @Override public Bucket build() { return new Bucket(storage, infoBuilder); } } Bucket(Storage storage, BucketInfo.BuilderImpl infoBuilder) { super(infoBuilder); 
this.storage = checkNotNull(storage); this.options = storage.options(); } /** * Checks if this bucket exists. * * @return true if this bucket exists, false otherwise * @throws StorageException upon failure */ public boolean exists(BucketSourceOption... options) { int length = options.length; Storage.BucketGetOption[] getOptions = Arrays.copyOf(toGetOptions(this, options), length + 1); getOptions[length] = Storage.BucketGetOption.fields(); return storage.get(name(), getOptions) != null; } /** * Fetches current bucket's latest information. Returns {@code null} if the bucket does not exist. * * @param options bucket read options * @return a {@code Bucket} object with latest information or {@code null} if not found * @throws StorageException upon failure */ public Bucket reload(BucketSourceOption... options) { return storage.get(name(), toGetOptions(this, options)); } /** * Updates the bucket's information. Bucket's name cannot be changed. A new {@code Bucket} object * is returned. By default no checks are made on the metadata generation of the current bucket. * If you want to update the information only if the current bucket metadata are at their latest * version use the {@code metagenerationMatch} option: * {@code bucket.update(BucketTargetOption.metagenerationMatch())} * * @param options update options * @return a {@code Bucket} object with updated information * @throws StorageException upon failure */ public Bucket update(BucketTargetOption... options) { return storage.update(this, options); } /** * Deletes this bucket. * * @param options bucket delete options * @return {@code true} if bucket was deleted, {@code false} if it was not found * @throws StorageException upon failure */ public boolean delete(BucketSourceOption... options) { return storage.delete(name(), toSourceOptions(this, options)); } /** * Returns the paginated list of {@code Blob} in this bucket. 
* * @param options options for listing blobs * @throws StorageException upon failure */ public Page<Blob> list(Storage.BlobListOption... options) { return storage.list(name(), options); } /** * Returns the requested blob in this bucket or {@code null} if not found. * * @param blob name of the requested blob * @param options blob search options * @throws StorageException upon failure */ public Blob get(String blob, BlobGetOption... options) { return storage.get(BlobId.of(name(), blob), options); } /** * Returns a list of requested blobs in this bucket. Blobs that do not exist are null. * * @param blobName1 first blob to get * @param blobName2 second blob to get * @param blobNames other blobs to get * @return an immutable list of {@code Blob} objects * @throws StorageException upon failure */ public List<Blob> get(String blobName1, String blobName2, String... blobNames) { BatchRequest.Builder batch = BatchRequest.builder(); batch.get(name(), blobName1); batch.get(name(), blobName2); for (String name : blobNames) { batch.get(name(), name); } List<Blob> blobs = new ArrayList<>(blobNames.length); BatchResponse response = storage.submit(batch.build()); for (BatchResponse.Result<Blob> result : response.gets()) { BlobInfo blobInfo = result.get(); blobs.add(blobInfo != null ? new Blob(storage, new BlobInfo.BuilderImpl(blobInfo)) : null); } return Collections.unmodifiableList(blobs); } /** * Creates a new blob in this bucket. Direct upload is used to upload {@code content}. * For large content, {@link Blob#writer(com.google.gcloud.storage.Storage.BlobWriteOption...)} * is recommended as it uses resumable upload. MD5 and CRC32C hashes of {@code content} are * computed and used for validating transferred data. * * @param blob a blob name * @param content the blob content * @param contentType the blob content type. If {@code null} then * {@value com.google.gcloud.storage.Storage#DEFAULT_CONTENT_TYPE} is used. 
* @param options options for blob creation * @return a complete blob information * @throws StorageException upon failure */ public Blob create(String blob, byte[] content, String contentType, BlobTargetOption... options) { BlobInfo blobInfo = BlobInfo.builder(BlobId.of(name(), blob)) .contentType(MoreObjects.firstNonNull(contentType, Storage.DEFAULT_CONTENT_TYPE)).build(); StorageRpc.Tuple<BlobInfo, Storage.BlobTargetOption[]> target = BlobTargetOption.toTargetOptions(blobInfo, options); return storage.create(target.x(), content, target.y()); } /** * Creates a new blob in this bucket. Direct upload is used to upload {@code content}. * For large content, {@link Blob#writer(com.google.gcloud.storage.Storage.BlobWriteOption...)} * is recommended as it uses resumable upload. * * @param blob a blob name * @param content the blob content as a stream * @param contentType the blob content type. If {@code null} then * {@value com.google.gcloud.storage.Storage#DEFAULT_CONTENT_TYPE} is used. * @param options options for blob creation * @return a complete blob information * @throws StorageException upon failure */ public Blob create(String blob, InputStream content, String contentType, BlobWriteOption... options) { BlobInfo blobInfo = BlobInfo.builder(BlobId.of(name(), blob)) .contentType(MoreObjects.firstNonNull(contentType, Storage.DEFAULT_CONTENT_TYPE)).build(); StorageRpc.Tuple<BlobInfo, Storage.BlobWriteOption[]> write = BlobWriteOption.toWriteOptions(blobInfo, options); return storage.create(write.x(), content, write.y()); } /** * Returns the bucket's {@code Storage} object used to issue requests. 
*/ public Storage storage() { return storage; } @Override public Builder toBuilder() { return new Builder(this); } @Override public boolean equals(Object obj) { return obj instanceof Bucket && Objects.equals(toPb(), ((Bucket) obj).toPb()) && Objects.equals(options, ((Bucket) obj).options); } @Override public int hashCode() { return Objects.hash(super.hashCode(), options); } private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); this.storage = options.service(); } static Bucket fromPb(Storage storage, com.google.api.services.storage.model.Bucket bucketPb) { return new Bucket(storage, new BucketInfo.BuilderImpl(BucketInfo.fromPb(bucketPb))); } }
Reword javadoc in BlobTargetOption and BlobWriteOption
gcloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java
Reword javadoc in BlobTargetOption and BlobWriteOption
<ide><path>cloud-java-storage/src/main/java/com/google/gcloud/storage/Bucket.java <ide> <ide> /** <ide> * Returns an option for blob's metageneration match. If this option is used the request will <del> * fail if metageneration does not match the provided value. Either this option or <del> * {@link #metagenerationNotMatch(long)} can be provided at the same time. <add> * fail if metageneration does not match the provided value. This option can not be provided <add> * together with {@link #metagenerationNotMatch(long)}. <ide> */ <ide> public static BlobTargetOption metagenerationMatch(long metageneration) { <ide> return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_MATCH, metageneration); <ide> <ide> /** <ide> * Returns an option for blob's metageneration mismatch. If this option is used the request will <del> * fail if metageneration matches the provided value. Either this option or <del> * {@link #metagenerationMatch(long)} can be provided at the same time. <add> * fail if metageneration matches the provided value. This option can not be provided together <add> * with {@link #metagenerationMatch(long)}. <ide> */ <ide> public static BlobTargetOption metagenerationNotMatch(long metageneration) { <ide> return new BlobTargetOption(StorageRpc.Option.IF_METAGENERATION_NOT_MATCH, metageneration); <ide> <ide> /** <ide> * Returns an option for blob's metageneration match. If this option is used the request will <del> * fail if metageneration does not match the provided value. Either this option or <del> * {@link #metagenerationNotMatch(long)} can be provided at the same time. <add> * fail if metageneration does not match the provided value. This option can not be provided <add> * together with {@link #metagenerationNotMatch(long)}. 
<ide> */ <ide> public static BlobWriteOption metagenerationMatch(long metageneration) { <ide> return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_METAGENERATION_MATCH, <ide> <ide> /** <ide> * Returns an option for blob's metageneration mismatch. If this option is used the request will <del> * fail if metageneration matches the provided value. Either this option or <del> * {@link #metagenerationMatch(long)} can be provided at the same time. <add> * fail if metageneration matches the provided value. This option can not be provided together <add> * with {@link #metagenerationMatch(long)}. <ide> */ <ide> public static BlobWriteOption metagenerationNotMatch(long metageneration) { <ide> return new BlobWriteOption(Storage.BlobWriteOption.Option.IF_METAGENERATION_NOT_MATCH,
Java
mit
ba6306d16580053d10a18a291dc09a10604500f2
0
sake/bouncycastle-java
package org.bouncycastle.crypto.util; import java.io.IOException; import java.io.InputStream; import java.math.BigInteger; import org.bouncycastle.asn1.ASN1InputStream; import org.bouncycastle.asn1.ASN1Object; import org.bouncycastle.asn1.ASN1Sequence; import org.bouncycastle.asn1.DEREncodable; import org.bouncycastle.asn1.DERInteger; import org.bouncycastle.asn1.DERObject; import org.bouncycastle.asn1.DERObjectIdentifier; import org.bouncycastle.asn1.nist.NISTNamedCurves; import org.bouncycastle.asn1.oiw.ElGamalParameter; import org.bouncycastle.asn1.oiw.OIWObjectIdentifiers; import org.bouncycastle.asn1.pkcs.DHParameter; import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers; import org.bouncycastle.asn1.pkcs.PrivateKeyInfo; import org.bouncycastle.asn1.pkcs.RSAPrivateKeyStructure; import org.bouncycastle.asn1.sec.ECPrivateKeyStructure; import org.bouncycastle.asn1.sec.SECNamedCurves; import org.bouncycastle.asn1.teletrust.TeleTrusTNamedCurves; import org.bouncycastle.asn1.x509.AlgorithmIdentifier; import org.bouncycastle.asn1.x509.DSAParameter; import org.bouncycastle.asn1.x9.X962NamedCurves; import org.bouncycastle.asn1.x9.X962Parameters; import org.bouncycastle.asn1.x9.X9ECParameters; import org.bouncycastle.asn1.x9.X9ObjectIdentifiers; import org.bouncycastle.crypto.params.AsymmetricKeyParameter; import org.bouncycastle.crypto.params.DHParameters; import org.bouncycastle.crypto.params.DHPrivateKeyParameters; import org.bouncycastle.crypto.params.DSAParameters; import org.bouncycastle.crypto.params.DSAPrivateKeyParameters; import org.bouncycastle.crypto.params.ECDomainParameters; import org.bouncycastle.crypto.params.ECPrivateKeyParameters; import org.bouncycastle.crypto.params.ElGamalParameters; import org.bouncycastle.crypto.params.ElGamalPrivateKeyParameters; import org.bouncycastle.crypto.params.RSAPrivateCrtKeyParameters; /** * Factory for creating private key objects from PKCS8 PrivateKeyInfo objects. 
*/ public class PrivateKeyFactory { /** * Create a private key parameter from a PKCS8 PrivateKeyInfo encoding. * * @param privateKeyInfoData the PrivateKeyInfo encoding * @return a suitable private key parameter * @throws IOException on an error decoding the key */ public static AsymmetricKeyParameter createKey(byte[] privateKeyInfoData) throws IOException { return createKey(PrivateKeyInfo.getInstance(ASN1Object.fromByteArray(privateKeyInfoData))); } /** * Create a private key parameter from a PKCS8 PrivateKeyInfo encoding read from a * stream. * * @param inStr the stream to read the PrivateKeyInfo encoding from * @return a suitable private key parameter * @throws IOException on an error decoding the key */ public static AsymmetricKeyParameter createKey(InputStream inStr) throws IOException { return createKey(PrivateKeyInfo.getInstance(new ASN1InputStream(inStr).readObject())); } /** * Create a private key parameter from the passed in PKCS8 PrivateKeyInfo object. * * @param keyInfo the PrivateKeyInfo object containing the key material * @return a suitable private key parameter * @throws IOException on an error decoding the key */ public static AsymmetricKeyParameter createKey(PrivateKeyInfo keyInfo) throws IOException { AlgorithmIdentifier algId = keyInfo.getAlgorithmId(); if (algId.getObjectId().equals(PKCSObjectIdentifiers.rsaEncryption)) { RSAPrivateKeyStructure keyStructure = new RSAPrivateKeyStructure( (ASN1Sequence)keyInfo.getPrivateKey()); return new RSAPrivateCrtKeyParameters(keyStructure.getModulus(), keyStructure.getPublicExponent(), keyStructure.getPrivateExponent(), keyStructure.getPrime1(), keyStructure.getPrime2(), keyStructure.getExponent1(), keyStructure.getExponent2(), keyStructure.getCoefficient()); } // TODO? 
// else if (algId.getObjectId().equals(X9ObjectIdentifiers.dhpublicnumber)) else if (algId.getObjectId().equals(PKCSObjectIdentifiers.dhKeyAgreement)) { DHParameter params = new DHParameter( (ASN1Sequence)keyInfo.getAlgorithmId().getParameters()); DERInteger derX = (DERInteger)keyInfo.getPrivateKey(); BigInteger lVal = params.getL(); int l = lVal == null ? 0 : lVal.intValue(); DHParameters dhParams = new DHParameters(params.getP(), params.getG(), null, l); return new DHPrivateKeyParameters(derX.getValue(), dhParams); } else if (algId.getObjectId().equals(OIWObjectIdentifiers.elGamalAlgorithm)) { ElGamalParameter params = new ElGamalParameter( (ASN1Sequence)keyInfo.getAlgorithmId().getParameters()); DERInteger derX = (DERInteger)keyInfo.getPrivateKey(); return new ElGamalPrivateKeyParameters(derX.getValue(), new ElGamalParameters( params.getP(), params.getG())); } else if (algId.getObjectId().equals(X9ObjectIdentifiers.id_dsa)) { DERInteger derX = (DERInteger)keyInfo.getPrivateKey(); DEREncodable de = keyInfo.getAlgorithmId().getParameters(); DSAParameters parameters = null; if (de != null) { DSAParameter params = DSAParameter.getInstance(de.getDERObject()); parameters = new DSAParameters(params.getP(), params.getQ(), params.getG()); } return new DSAPrivateKeyParameters(derX.getValue(), parameters); } else if (algId.getObjectId().equals(X9ObjectIdentifiers.id_ecPublicKey)) { X962Parameters params = new X962Parameters( (DERObject)keyInfo.getAlgorithmId().getParameters()); ECDomainParameters dParams = null; if (params.isNamedCurve()) { DERObjectIdentifier oid = (DERObjectIdentifier)params.getParameters(); X9ECParameters ecP = X962NamedCurves.getByOID(oid); if (ecP == null) { ecP = SECNamedCurves.getByOID(oid); if (ecP == null) { ecP = NISTNamedCurves.getByOID(oid); if (ecP == null) { ecP = TeleTrusTNamedCurves.getByOID(oid); } } } dParams = new ECDomainParameters(ecP.getCurve(), ecP.getG(), ecP.getN(), ecP.getH(), ecP.getSeed()); } else { X9ECParameters ecP = new 
X9ECParameters((ASN1Sequence)params.getParameters()); dParams = new ECDomainParameters(ecP.getCurve(), ecP.getG(), ecP.getN(), ecP.getH(), ecP.getSeed()); } ECPrivateKeyStructure ec = new ECPrivateKeyStructure( (ASN1Sequence)keyInfo.getPrivateKey()); return new ECPrivateKeyParameters(ec.getKey(), dParams); } else { throw new RuntimeException("algorithm identifier in key not recognised"); } } }
src/org/bouncycastle/crypto/util/PrivateKeyFactory.java
package org.bouncycastle.crypto.util; import org.bouncycastle.asn1.ASN1InputStream; import org.bouncycastle.asn1.ASN1Object; import org.bouncycastle.asn1.ASN1Sequence; import org.bouncycastle.asn1.DEREncodable; import org.bouncycastle.asn1.DERInteger; import org.bouncycastle.asn1.DERObject; import org.bouncycastle.asn1.DERObjectIdentifier; import org.bouncycastle.asn1.nist.NISTNamedCurves; import org.bouncycastle.asn1.oiw.ElGamalParameter; import org.bouncycastle.asn1.oiw.OIWObjectIdentifiers; import org.bouncycastle.asn1.pkcs.DHParameter; import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers; import org.bouncycastle.asn1.pkcs.PrivateKeyInfo; import org.bouncycastle.asn1.pkcs.RSAPrivateKeyStructure; import org.bouncycastle.asn1.sec.ECPrivateKeyStructure; import org.bouncycastle.asn1.sec.SECNamedCurves; import org.bouncycastle.asn1.teletrust.TeleTrusTNamedCurves; import org.bouncycastle.asn1.x509.AlgorithmIdentifier; import org.bouncycastle.asn1.x509.DSAParameter; import org.bouncycastle.asn1.x9.X962NamedCurves; import org.bouncycastle.asn1.x9.X962Parameters; import org.bouncycastle.asn1.x9.X9ECParameters; import org.bouncycastle.asn1.x9.X9ObjectIdentifiers; import org.bouncycastle.crypto.params.AsymmetricKeyParameter; import org.bouncycastle.crypto.params.DHParameters; import org.bouncycastle.crypto.params.DHPrivateKeyParameters; import org.bouncycastle.crypto.params.DSAParameters; import org.bouncycastle.crypto.params.DSAPrivateKeyParameters; import org.bouncycastle.crypto.params.ECDomainParameters; import org.bouncycastle.crypto.params.ECPrivateKeyParameters; import org.bouncycastle.crypto.params.ElGamalParameters; import org.bouncycastle.crypto.params.ElGamalPrivateKeyParameters; import org.bouncycastle.crypto.params.RSAPrivateCrtKeyParameters; import java.io.IOException; import java.io.InputStream; import java.math.BigInteger; /** * Factory for creating private key objects from PKCS8 PrivateKeyInfo objects. 
*/ public class PrivateKeyFactory { /** * Create a private key parameter from a PKCS8 PrivateKeyInfo encoding. * * @param privateKeyInfoData the PrivateKeyInfo encoding * @return a suitable private key parameter * @throws IOException on an error decoding the key */ public static AsymmetricKeyParameter createKey( byte[] privateKeyInfoData) throws IOException { return createKey( PrivateKeyInfo.getInstance( ASN1Object.fromByteArray(privateKeyInfoData))); } /** * Create a private key parameter from a PKCS8 PrivateKeyInfo encoding read from a stream. * * @param inStr the stream to read the PrivateKeyInfo encoding from * @return a suitable private key parameter * @throws IOException on an error decoding the key */ public static AsymmetricKeyParameter createKey( InputStream inStr) throws IOException { return createKey( PrivateKeyInfo.getInstance( new ASN1InputStream(inStr).readObject())); } /** * Create a private key parameter from the passed in PKCS8 PrivateKeyInfo object. * * @param keyInfo the PrivateKeyInfo object containing the key material * @return a suitable private key parameter * @throws IOException on an error decoding the key */ public static AsymmetricKeyParameter createKey( PrivateKeyInfo keyInfo) throws IOException { AlgorithmIdentifier algId = keyInfo.getAlgorithmId(); if (algId.getObjectId().equals(PKCSObjectIdentifiers.rsaEncryption)) { RSAPrivateKeyStructure keyStructure = new RSAPrivateKeyStructure((ASN1Sequence)keyInfo.getPrivateKey()); return new RSAPrivateCrtKeyParameters( keyStructure.getModulus(), keyStructure.getPublicExponent(), keyStructure.getPrivateExponent(), keyStructure.getPrime1(), keyStructure.getPrime2(), keyStructure.getExponent1(), keyStructure.getExponent2(), keyStructure.getCoefficient()); } else if (algId.getObjectId().equals(PKCSObjectIdentifiers.dhKeyAgreement)) { DHParameter params = new DHParameter((ASN1Sequence)keyInfo.getAlgorithmId().getParameters()); DERInteger derX = (DERInteger)keyInfo.getPrivateKey(); BigInteger lVal = 
params.getL(); int l = lVal == null ? 0 : lVal.intValue(); DHParameters dhParams = new DHParameters(params.getP(), params.getG(), null, l); return new DHPrivateKeyParameters(derX.getValue(), dhParams); } else if (algId.getObjectId().equals(OIWObjectIdentifiers.elGamalAlgorithm)) { ElGamalParameter params = new ElGamalParameter((ASN1Sequence)keyInfo.getAlgorithmId().getParameters()); DERInteger derX = (DERInteger)keyInfo.getPrivateKey(); return new ElGamalPrivateKeyParameters(derX.getValue(), new ElGamalParameters(params.getP(), params.getG())); } else if (algId.getObjectId().equals(X9ObjectIdentifiers.id_dsa)) { DERInteger derX = (DERInteger)keyInfo.getPrivateKey(); DEREncodable de = keyInfo.getAlgorithmId().getParameters(); DSAParameters parameters = null; if (de != null) { DSAParameter params = DSAParameter.getInstance(de.getDERObject()); parameters = new DSAParameters(params.getP(), params.getQ(), params.getG()); } return new DSAPrivateKeyParameters(derX.getValue(), parameters); } else if (algId.getObjectId().equals(X9ObjectIdentifiers.id_ecPublicKey)) { X962Parameters params = new X962Parameters((DERObject)keyInfo.getAlgorithmId().getParameters()); ECDomainParameters dParams = null; if (params.isNamedCurve()) { DERObjectIdentifier oid = (DERObjectIdentifier)params.getParameters(); X9ECParameters ecP = X962NamedCurves.getByOID(oid); if (ecP == null) { ecP = SECNamedCurves.getByOID(oid); if (ecP == null) { ecP = NISTNamedCurves.getByOID(oid); if (ecP == null) { ecP = TeleTrusTNamedCurves.getByOID(oid); } } } dParams = new ECDomainParameters( ecP.getCurve(), ecP.getG(), ecP.getN(), ecP.getH(), ecP.getSeed()); } else { X9ECParameters ecP = new X9ECParameters( (ASN1Sequence)params.getParameters()); dParams = new ECDomainParameters( ecP.getCurve(), ecP.getG(), ecP.getN(), ecP.getH(), ecP.getSeed()); } ECPrivateKeyStructure ec = new ECPrivateKeyStructure((ASN1Sequence)keyInfo.getPrivateKey()); return new ECPrivateKeyParameters(ec.getKey(), dParams); } else { throw new 
RuntimeException("algorithm identifier in key not recognised"); } } }
Add TODO
src/org/bouncycastle/crypto/util/PrivateKeyFactory.java
Add TODO
<ide><path>rc/org/bouncycastle/crypto/util/PrivateKeyFactory.java <ide> package org.bouncycastle.crypto.util; <add> <add>import java.io.IOException; <add>import java.io.InputStream; <add>import java.math.BigInteger; <ide> <ide> import org.bouncycastle.asn1.ASN1InputStream; <ide> import org.bouncycastle.asn1.ASN1Object; <ide> import org.bouncycastle.crypto.params.ElGamalPrivateKeyParameters; <ide> import org.bouncycastle.crypto.params.RSAPrivateCrtKeyParameters; <ide> <del>import java.io.IOException; <del>import java.io.InputStream; <del>import java.math.BigInteger; <del> <ide> /** <ide> * Factory for creating private key objects from PKCS8 PrivateKeyInfo objects. <ide> */ <ide> * @return a suitable private key parameter <ide> * @throws IOException on an error decoding the key <ide> */ <del> public static AsymmetricKeyParameter createKey( <del> byte[] privateKeyInfoData) <del> throws IOException <add> public static AsymmetricKeyParameter createKey(byte[] privateKeyInfoData) throws IOException <ide> { <del> return createKey( <del> PrivateKeyInfo.getInstance( <del> ASN1Object.fromByteArray(privateKeyInfoData))); <add> return createKey(PrivateKeyInfo.getInstance(ASN1Object.fromByteArray(privateKeyInfoData))); <ide> } <ide> <ide> /** <del> * Create a private key parameter from a PKCS8 PrivateKeyInfo encoding read from a stream. <add> * Create a private key parameter from a PKCS8 PrivateKeyInfo encoding read from a <add> * stream. 
<ide> * <ide> * @param inStr the stream to read the PrivateKeyInfo encoding from <ide> * @return a suitable private key parameter <ide> * @throws IOException on an error decoding the key <ide> */ <del> public static AsymmetricKeyParameter createKey( <del> InputStream inStr) <del> throws IOException <add> public static AsymmetricKeyParameter createKey(InputStream inStr) throws IOException <ide> { <del> return createKey( <del> PrivateKeyInfo.getInstance( <del> new ASN1InputStream(inStr).readObject())); <add> return createKey(PrivateKeyInfo.getInstance(new ASN1InputStream(inStr).readObject())); <ide> } <ide> <ide> /** <ide> * @return a suitable private key parameter <ide> * @throws IOException on an error decoding the key <ide> */ <del> public static AsymmetricKeyParameter createKey( <del> PrivateKeyInfo keyInfo) <del> throws IOException <add> public static AsymmetricKeyParameter createKey(PrivateKeyInfo keyInfo) throws IOException <ide> { <del> AlgorithmIdentifier algId = keyInfo.getAlgorithmId(); <del> <add> AlgorithmIdentifier algId = keyInfo.getAlgorithmId(); <add> <ide> if (algId.getObjectId().equals(PKCSObjectIdentifiers.rsaEncryption)) <ide> { <del> RSAPrivateKeyStructure keyStructure = new RSAPrivateKeyStructure((ASN1Sequence)keyInfo.getPrivateKey()); <add> RSAPrivateKeyStructure keyStructure = new RSAPrivateKeyStructure( <add> (ASN1Sequence)keyInfo.getPrivateKey()); <ide> <del> return new RSAPrivateCrtKeyParameters( <del> keyStructure.getModulus(), <del> keyStructure.getPublicExponent(), <del> keyStructure.getPrivateExponent(), <del> keyStructure.getPrime1(), <del> keyStructure.getPrime2(), <del> keyStructure.getExponent1(), <del> keyStructure.getExponent2(), <del> keyStructure.getCoefficient()); <add> return new RSAPrivateCrtKeyParameters(keyStructure.getModulus(), <add> keyStructure.getPublicExponent(), keyStructure.getPrivateExponent(), <add> keyStructure.getPrime1(), keyStructure.getPrime2(), keyStructure.getExponent1(), <add> keyStructure.getExponent2(), 
keyStructure.getCoefficient()); <ide> } <add> // TODO? <add>// else if (algId.getObjectId().equals(X9ObjectIdentifiers.dhpublicnumber)) <ide> else if (algId.getObjectId().equals(PKCSObjectIdentifiers.dhKeyAgreement)) <ide> { <del> DHParameter params = new DHParameter((ASN1Sequence)keyInfo.getAlgorithmId().getParameters()); <del> DERInteger derX = (DERInteger)keyInfo.getPrivateKey(); <add> DHParameter params = new DHParameter( <add> (ASN1Sequence)keyInfo.getAlgorithmId().getParameters()); <add> DERInteger derX = (DERInteger)keyInfo.getPrivateKey(); <ide> <ide> BigInteger lVal = params.getL(); <ide> int l = lVal == null ? 0 : lVal.intValue(); <ide> } <ide> else if (algId.getObjectId().equals(OIWObjectIdentifiers.elGamalAlgorithm)) <ide> { <del> ElGamalParameter params = new ElGamalParameter((ASN1Sequence)keyInfo.getAlgorithmId().getParameters()); <del> DERInteger derX = (DERInteger)keyInfo.getPrivateKey(); <add> ElGamalParameter params = new ElGamalParameter( <add> (ASN1Sequence)keyInfo.getAlgorithmId().getParameters()); <add> DERInteger derX = (DERInteger)keyInfo.getPrivateKey(); <ide> <del> return new ElGamalPrivateKeyParameters(derX.getValue(), new ElGamalParameters(params.getP(), params.getG())); <add> return new ElGamalPrivateKeyParameters(derX.getValue(), new ElGamalParameters( <add> params.getP(), params.getG())); <ide> } <ide> else if (algId.getObjectId().equals(X9ObjectIdentifiers.id_dsa)) <ide> { <ide> } <ide> else if (algId.getObjectId().equals(X9ObjectIdentifiers.id_ecPublicKey)) <ide> { <del> X962Parameters params = new X962Parameters((DERObject)keyInfo.getAlgorithmId().getParameters()); <del> ECDomainParameters dParams = null; <del> <add> X962Parameters params = new X962Parameters( <add> (DERObject)keyInfo.getAlgorithmId().getParameters()); <add> ECDomainParameters dParams = null; <add> <ide> if (params.isNamedCurve()) <ide> { <ide> DERObjectIdentifier oid = (DERObjectIdentifier)params.getParameters(); <del> X9ECParameters ecP = 
X962NamedCurves.getByOID(oid); <add> X9ECParameters ecP = X962NamedCurves.getByOID(oid); <ide> <ide> if (ecP == null) <ide> { <ide> } <ide> } <ide> <del> dParams = new ECDomainParameters( <del> ecP.getCurve(), <del> ecP.getG(), <del> ecP.getN(), <del> ecP.getH(), <del> ecP.getSeed()); <add> dParams = new ECDomainParameters(ecP.getCurve(), ecP.getG(), ecP.getN(), <add> ecP.getH(), ecP.getSeed()); <ide> } <ide> else <ide> { <del> X9ECParameters ecP = new X9ECParameters( <del> (ASN1Sequence)params.getParameters()); <del> dParams = new ECDomainParameters( <del> ecP.getCurve(), <del> ecP.getG(), <del> ecP.getN(), <del> ecP.getH(), <del> ecP.getSeed()); <add> X9ECParameters ecP = new X9ECParameters((ASN1Sequence)params.getParameters()); <add> dParams = new ECDomainParameters(ecP.getCurve(), ecP.getG(), ecP.getN(), <add> ecP.getH(), ecP.getSeed()); <ide> } <ide> <del> ECPrivateKeyStructure ec = new ECPrivateKeyStructure((ASN1Sequence)keyInfo.getPrivateKey()); <add> ECPrivateKeyStructure ec = new ECPrivateKeyStructure( <add> (ASN1Sequence)keyInfo.getPrivateKey()); <ide> <ide> return new ECPrivateKeyParameters(ec.getKey(), dParams); <ide> }
Java
mit
bbeaa79cf8737e8759543dbd4a5644f22e37ce05
0
testmycode/tmc-cli,testmycode/tmc-cli,tmc-cli/tmc-cli,tmc-cli/tmc-cli
package fi.helsinki.cs.tmc.cli.command; import fi.helsinki.cs.tmc.cli.Application; import fi.helsinki.cs.tmc.cli.tmcstuff.Settings; import fi.helsinki.cs.tmc.cli.tmcstuff.SettingsIo; import fi.helsinki.cs.tmc.cli.tmcstuff.TmcUtil; import fi.helsinki.cs.tmc.core.TmcCore; import fi.helsinki.cs.tmc.core.domain.Course; import fi.helsinki.cs.tmc.core.domain.ProgressObserver; import fi.helsinki.cs.tmc.core.exceptions.FailedHttpResponseException; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.List; import java.util.Scanner; import java.util.concurrent.Callable; public class LoginCommand implements Command { private static final Logger logger = LoggerFactory.getLogger(TmcUtil.class); // todo: use our own terminal IO when available private final Scanner scanner; private final GnuParser parser; private final Options options; private Application app; public LoginCommand(Application app) { this.app = app; this.scanner = new Scanner(System.in); this.parser = new GnuParser(); this.options = new Options(); options.addOption("u", "user", true, "TMC username"); options.addOption("p", "password", true, "Password for the user"); options.addOption("s", "server", true, "Address for TMC server"); } @Override public String getDescription() { return "Login to TMC server."; } @Override public String getName() { return "login"; } @Override public void run(String[] args) { String username = null; String password = null; String serverAddress = null; // todo: clean this. 
try { CommandLine line = this.parser.parse(options, args); username = line.getOptionValue("u"); if (username == null) { username = readLine("username: "); } password = line.getOptionValue("p"); if (password == null) { password = readPassword("password: "); } serverAddress = line.getOptionValue("s"); if (serverAddress == null) { // todo: don't hardcode the default value, get it from somewhere serverAddress = "https://tmc.mooc.fi"; } } catch (ParseException | IOException e) { logger.error("Unable to parse username or password."); } Settings settings = new Settings(serverAddress, username, password); if (loginPossible(settings)) { SettingsIo settingsIo = new SettingsIo(); if (settingsIo.save(settings)) { System.out.println("Login successful!"); } else { System.out.println("Failed to write config file. " + "Login failed."); } } else { System.out.println("Login failed."); } } /** * Try to contact TMC server. If successful, user exists. * * @return True if user exist */ private boolean loginPossible(Settings settings) { app.createTmcCore(settings); TmcCore core = this.app.getTmcCore(); Callable<List<Course>> callable = core.listCourses( ProgressObserver.NULL_OBSERVER); try { callable.call(); } catch (Exception e) { Throwable cause = e.getCause(); if (cause instanceof FailedHttpResponseException) { FailedHttpResponseException httpEx = (FailedHttpResponseException) cause; if (httpEx.getStatusCode() == 401) { System.out.println("Incorrect username or password."); return false; } } System.out.println("Unable to connect to server " + settings.getServerAddress()); return false; } return true; } // todo: use our own terminal IO when available private String readLine(String prompt) throws IOException { System.out.print(prompt); return scanner.nextLine(); } // todo: use our own terminal IO when available private String readPassword(String prompt) throws IOException { // Read the password in cleartext if no console is present (might happen // in some IDEs?) 
if (System.console() != null) { char[] pwd = System.console().readPassword(prompt); return new String(pwd); } logger.info("System.console not present, unable to read password " + "securely. Reading password in cleartext."); return this.readLine(prompt); } }
src/main/java/fi/helsinki/cs/tmc/cli/command/LoginCommand.java
package fi.helsinki.cs.tmc.cli.command; import fi.helsinki.cs.tmc.cli.Application; import fi.helsinki.cs.tmc.cli.tmcstuff.Settings; import fi.helsinki.cs.tmc.cli.tmcstuff.SettingsIo; import fi.helsinki.cs.tmc.cli.tmcstuff.TmcUtil; import fi.helsinki.cs.tmc.core.TmcCore; import fi.helsinki.cs.tmc.core.domain.Course; import fi.helsinki.cs.tmc.core.domain.ProgressObserver; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.List; import java.util.Scanner; import java.util.concurrent.Callable; public class LoginCommand implements Command { private static final Logger logger = LoggerFactory.getLogger(TmcUtil.class); // todo: use our own terminal IO when available private final Scanner scanner; private final GnuParser parser; private final Options options; private Application app; public LoginCommand(Application app) { this.app = app; this.scanner = new Scanner(System.in); this.parser = new GnuParser(); this.options = new Options(); options.addOption("u", "user", true, "TMC username"); options.addOption("p", "password", true, "Password for the user"); options.addOption("s", "server", true, "Address for TMC server"); } @Override public String getDescription() { return "Login to TMC server."; } @Override public String getName() { return "login"; } @Override public void run(String[] args) { String username = null; String password = null; String serverAddress = null; // todo: clean this. 
try { CommandLine line = this.parser.parse(options, args); username = line.getOptionValue("u"); if (username == null) { username = readLine("username: "); } password = line.getOptionValue("p"); if (password == null) { password = readPassword("password: "); } serverAddress = line.getOptionValue("s"); if (serverAddress == null) { // todo: don't hardcode the default value, get it from somewhere serverAddress = "https://tmc.mooc.fi"; } } catch (ParseException | IOException e) { logger.error("Unable to parse username or password."); } Settings settings = new Settings(serverAddress, username, password); if (loginPossible(settings)) { SettingsIo settingsIo = new SettingsIo(); if (settingsIo.save(settings)) { System.out.println("Login successful!"); } else { System.out.println("Failed to write config file. " + "Login failed."); } } else { System.out.println("Login failed."); } } /** * Try to contact TMC server. If successful, user exists. * * @return True if user exist */ private boolean loginPossible(Settings settings) { app.createTmcCore(settings); TmcCore core = this.app.getTmcCore(); Callable<List<Course>> callable = core.listCourses( ProgressObserver.NULL_OBSERVER); try { callable.call(); } catch (Exception e) { logger.error("Unable to login into server " + settings.getServerAddress()); // todo: if 401, 404 do something return false; } return true; } // todo: use our own terminal IO when available private String readLine(String prompt) throws IOException { System.out.print(prompt); return scanner.nextLine(); } // todo: use our own terminal IO when available private String readPassword(String prompt) throws IOException { // Read the password in cleartext if no console is present (might happen // in some IDEs?) if (System.console() != null) { char[] pwd = System.console().readPassword(prompt); return new String(pwd); } logger.info("System.console not present, unable to read password " + "securely. Reading password in cleartext."); return this.readLine(prompt); } }
Catch HTTP status code 401
src/main/java/fi/helsinki/cs/tmc/cli/command/LoginCommand.java
Catch HTTP status code 401
<ide><path>rc/main/java/fi/helsinki/cs/tmc/cli/command/LoginCommand.java <ide> import fi.helsinki.cs.tmc.core.TmcCore; <ide> import fi.helsinki.cs.tmc.core.domain.Course; <ide> import fi.helsinki.cs.tmc.core.domain.ProgressObserver; <add>import fi.helsinki.cs.tmc.core.exceptions.FailedHttpResponseException; <ide> <ide> import org.apache.commons.cli.CommandLine; <ide> import org.apache.commons.cli.GnuParser; <ide> try { <ide> callable.call(); <ide> } catch (Exception e) { <del> logger.error("Unable to login into server " <add> Throwable cause = e.getCause(); <add> if (cause instanceof FailedHttpResponseException) { <add> FailedHttpResponseException httpEx <add> = (FailedHttpResponseException) cause; <add> if (httpEx.getStatusCode() == 401) { <add> System.out.println("Incorrect username or password."); <add> return false; <add> } <add> } <add> <add> System.out.println("Unable to connect to server " <ide> + settings.getServerAddress()); <del> // todo: if 401, 404 do something <ide> return false; <ide> } <ide>
JavaScript
mit
f0cdb62f7cfc2bf5dfb6ffd97848f4fc8e84680d
0
joshvillahermosa/IS322-BACApp
var Stats = Backbone.View.extend({ el: '#load', initialize: function(){ this.stats= '<h2>Stats</h2><canvas id="chart" width="300" height="300"></canvas>'; this.table = '<table class="table-striped "><thead><tr><th>Date</th><th>Highest BAC</th></tr></thead><tbody id="records"></tbody></table>'; }, render: function(){ this.user = you.retrieveProfile(); this.$el.html(this.stats+''+this.table); this.chartGenerate(); this.populateTable(); }, chartGenerate: function(){ var bacData = this.stringDateAndBac(); var data = { labels: bacData.date, datasets : [ { fillColor : "rgba(220,220,220,0.5)", strokeColor : "rgba(220,220,220,1)", data : bacData.bacLevel //data: [0.0245,0.0645,0.0545] } ] } var ctx = $("#chart").get(0).getContext("2d"); new Chart(ctx).Bar(data); console.log(bacData.bacLevel); }, populateTable: function(){ $('#records').empty(); var userBacRecord = this.user.BACLevels.length - 1; for(var i = userBacRecord; i >= 0; i--){ $('#records').append('<tr><td>'+this.user.BACLevels[i].bacLevelHigh+' -</td><td>'+this.user.BACLevels[i].date+'</td></tr>'); } }, stringDateAndBac: function(){ var userBacRecord = this.user.BACLevels.length - 1; var bacRecord = {bacLevel: [], date: []}; for(var i = userBacRecord; i >= 0; i--){ bacRecord.bacLevel[i] = this.user.BACLevels[i].bacLevelHigh; bacRecord.date[i] = this.user.BACLevels[i].date; } return bacRecord; } }); var stats = new Stats();
js/views/Stats.js
var Stats = Backbone.View.extend({ el: '#load', initialize: function(){ this.stats= '<h2>Stats</h2><canvas id="chart" width="500" height="300"></canvas>'; this.table = '<table class="table-striped "><thead><tr><th>Date</th><th>Highest BAC</th></tr></thead><tbody id="records"></tbody></table>'; }, render: function(){ this.user = you.retrieveProfile(); this.$el.html(this.stats+''+this.table); this.chartGenerate(); this.populateTable(); }, chartGenerate: function(){ var bacData = this.stringDateAndBac(); var data = { labels: [bacData.date.splice(0)], datasets : [ { fillColor : "rgba(220,220,220,0.5)", strokeColor : "rgba(220,220,220,1)", data : bacData.bacLevel //data: [0.0245,0.0645,0.0545] } ] } var ctx = $("#chart").get(0).getContext("2d"); new Chart(ctx).Bar(data); console.log(bacData.bacLevel); }, populateTable: function(){ $('#records').empty(); var userBacRecord = this.user.BACLevels.length - 1; for(var i = userBacRecord; i >= 0; i--){ $('#records').append('<tr><td>'+this.user.BACLevels[i].bacLevelHigh+' -</td><td>'+this.user.BACLevels[i].date+'</td></tr>'); } }, stringDateAndBac: function(){ var userBacRecord = this.user.BACLevels.length - 1; var bacRecord = {bacLevel: [], date: []}; for(var i = userBacRecord; i >= 0; i--){ bacRecord.bacLevel[i] = this.user.BACLevels[i].bacLevelHigh; bacRecord.date[i] = this.user.BACLevels[i].date; } return bacRecord; } }); var stats = new Stats();
Fixed charts rendering
js/views/Stats.js
Fixed charts rendering
<ide><path>s/views/Stats.js <ide> el: '#load', <ide> <ide> initialize: function(){ <del> this.stats= '<h2>Stats</h2><canvas id="chart" width="500" height="300"></canvas>'; <add> this.stats= '<h2>Stats</h2><canvas id="chart" width="300" height="300"></canvas>'; <ide> this.table = '<table class="table-striped "><thead><tr><th>Date</th><th>Highest BAC</th></tr></thead><tbody id="records"></tbody></table>'; <ide> }, <ide> <ide> var bacData = this.stringDateAndBac(); <ide> <ide> var data = { <del> labels: [bacData.date.splice(0)], <add> labels: bacData.date, <ide> datasets : [ <ide> { <ide> fillColor : "rgba(220,220,220,0.5)",
JavaScript
mit
ccf73e338dd554f90d8108ede53bf5c50489f7e6
0
gatechipdl/GameOfLight,gatechipdl/GameOfLight,gatechipdl/GameOfLight,gatechipdl/GameOfLight
'use strict'; const baseVersion = 3005; const express = require('express'); const app = express(); const server = require('http').createServer(app); const io = require('socket.io')(server); const color = require("rgb"); const base64js = require('base64-js'); const path = require('path'); const fs = require('fs'); const md5 = require('md5-file'); const dgram = require('dgram'); const udpSocket = dgram.createSocket('udp4'); var udpPortSend = 60000; //var udpMulticastIP = '230.185.192.109'; var udpPortRecv = 60001; var udpDestIP = '192.168.1.199'; // host everything in the public folder app.use(express.static(__dirname + '/public')); //check github.com/esp8266/Arduino/issues/2228 for example app.get('/update/base',function(req,res){ //check version somehow console.log('a device is requesting an update'); console.dir(req.headers); if(parseInt(req.headers['x-esp8266-version'])!=baseVersion){ //could be < var full_path = path.join(__dirname,'/bin/base'+baseVersion+'.bin'); fs.readFile(full_path,"binary",function(err,file){ if(err){ console.log('error uploading new firmware'); res.writeHeader(500, {"Content-Type": "text/plain"}); res.write(err + "\n"); res.end(); } else{ console.log('uploading new firmware'); res.writeHeader(200, {"Content-Type": "application/octect-stream", "Content-Disposition": "attachment;filename="+path.basename(full_path), "Content-Length": ""+fs.statSync(full_path)["size"], "x-MD5": md5.sync(full_path)}); res.write(file, "binary"); res.end(); } }); } else{ console.log('not uploading new firmware'); res.writeHeader(304, {"Content-Type": "text/plain"}); res.write("304 Not Modified\n"); res.end(); } }); var port = 80; server.listen(port); /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /* Station 
Management */ /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// var stationData = { '60-01-94-10-89-E5':{ 'online':false, 'id':'2', 'mode':0, 'mac':'60-01-94-10-89-E5', 'ip':'192.168.0.102', 'name':'ESP_1089E5', 'socket':'sdfghj' } }; //var operationModes = { // 0:'StrandTest', // 1:'SlaveListen', // 2:'CapSenseControl', // 3:'StrandTest2', // 4:'StrandTest3' //} function loadStationData(){ var dataFile = fs.readFileSync('./data/stations.json'); stationData = JSON.parse(dataFile); } loadStationData(); //get stationData on server start resetStationData(); function saveStationData(){ var dataJSON = JSON.stringify(stationData); fs.writeFileSync('./data/stations.json',dataJSON); } // removes all data entries function clearStationData(){ Object.keys(stationData).forEach(function(key) { delete stationData[key]; }); } // sets all data entries' online to false // run at start function resetStationData(){ Object.keys(stationData).forEach(function(key) { stationData[key]['online'] = false; }); } function updateStationData(data){ Object.keys(data).forEach(function(key) { Object.assign(stationData[key],data[key]); //merge data }); io.sockets.emit('syncStationData',stationData); } function stationDataListener(socket){ socket.on('idhostnameipmac',function(data){ //TODO: parse the pieces and keep in a managed list console.log('idhostnameipmac'); console.log(data); //parse and add to managed list var aData = data.split(","); // stationId_str + "," // + String(WiFi.hostname()) + "," // + String(WiFi.localIP()) + "," // + String(mac[5], HEX) + "," // + String(mac[4], HEX) + "," // + String(mac[3], HEX) + "," // + String(mac[2], HEX) + "," // + String(mac[1], HEX) + "," // + String(mac[0], HEX)).c_str() var mData = 
aData[8]+"-"+aData[7]+"-"+aData[6]+"-"+aData[5]+"-"+aData[4]+"-"+aData[3]; var sData = { [mData]:{ 'online':true, 'id':aData[0], 'mode':0, 'mac':mData, 'ip':aData[2], 'name':aData[1], 'socket':socket['id'], 'firmware':'unknown' } } console.log("sData"); console.log(sData); updateStationData(sData) }); } //changes the id value of a number function setStationIdListener(socket){ socket.on('setStationId',function(data){ if( !isNaN(data['stationId'])){ if( Number.parseInt(data['stationId'])){ console.log('setting mac '+data['mac']+' to station '+data['stationId']); io.to([stationData[data['mac']]['socket']]).emit('setStationId',base64js.fromByteArray(new Uint16Array([data['stationId']]))); //update station data updateStationData({ [data['mac']]:{ 'id':data['stationId'] } }); } else{ console.log('stationId not an integer: '+data); } }else{ console.log('data issue: '+data); console.dir(data); } }) } function setStationModeListener(socket){ socket.on('setStationMode',function(data){ var data64 = base64js.fromByteArray(new Uint8Array([data['modeId']])); io.to([stationData[data['mac']]['socket']]).emit(data64); updateStationData({ [data['mac']]:{ 'mode':data['modeId'] } }); }); } function pingStationListener(socket){ socket.on('pingStation',function(data){ //var dataBuffer = new Uint8Array([startIndex,numToFill,ledColor.r,ledColor.g,ledColor.b]); var dataBuffer = new Uint8Array([0,45,0,255,0]); //all green io.to([stationData[data['mac']]['socket']]).emit('fillSolid',base64js.fromByteArray(dataBuffer)); }); } function checkForUpdateListener(socket){ socket.on('checkForUpdate',function(data){ io.to([stationData[data['mac']]['socket']]).emit('checkForUpdate',""); }); } //var configFile = fs.readFileSync('./databaseTest.json'); //var config = JSON.parse(configFile); //config.push({"totalTapScore": totTap, "tileId": tileId, "timeStamp": time}); ////console.log(' db now is, ', config) //var configJSON = JSON.stringify(config); //fs.writeFileSync('./databaseTest.json', configJSON); 
//stations = [ // ESP_1089E5 60-01-94-10-89-E5 192.168.0.102 01:09:16 //3 ESP_0FF841 60-01-94-0F-F8-41 192.168.0.103 01:09:18 //4 ESP_108363 60-01-94-10-83-63 192.168.0.104 01:09:16 //5 ESP_108507 60-01-94-10-85-07 192.168.0.105 01:40:14 //6 ESP_0FF49E 60-01-94-0F-F4-9E 192.168.0.106 01:09:16 //7 ESP_108BE2 60-01-94-10-8B-E2 192.168.0.107 01:09:16 //8 ESP_0FF846 60-01-94-0F-F8-46 192.168.0.108 01:09:16 //9 ESP_0E7780 60-01-94-0E-77-80 192.168.0.109 01:09:15 //10 ESP_1083B1 60-01-94-10-83-B1 192.168.0.110 01:09:15 //11 ESP_108B42 60-01-94-10-8B-42 192.168.0.111 01:09:16 //12 ESP_0E7772 60-01-94-0E-77-72 192.168.0.112 01:09:20 //13 ESP_108446 60-01-94-10-84-46 192.168.0.113 01:40:16 //14 ESP_107D19 60-01-94-10-7D-19 192.168.0.114 01:09:16 //15 ESP_0E75FE 60-01-94-0E-75-FE 192.168.0.115 01:09:16 //16 ESP_0E777D 60-01-94-0E-77-7D 192.168.0.116 01:09:16 //17 ESP_10835B 60-01-94-10-83-5B 192.168.0.117 01:09:16 //18 ESP_0FF684 60-01-94-0F-F6-84 192.168.0.118 01:09:16 //19 ESP_1080FB 60-01-94-10-80-FB 192.168.0.119 01:09:16 //20 ESP_0FF4BB 60-01-94-0F-F4-BB 192.168.0.120 01:09:16 //21 ESP_0E7545 60-01-94-0E-75-45 192.168.0.121 01:40:14 //22 ESP_1083C7 60-01-94-10-83-C7 192.168.0.122 01:09:16 //23 ESP_107EC9 60-01-94-10-7E-C9 192.168.0.123 01:09:16 //24 ESP_0FF398 60-01-94-0F-F3-98 192.168.0.124 00:38:29 //25 ESP_108A60 60-01-94-10-8A-60 192.168.0.125 01:38:09 //26 ESP_0FF878 60-01-94-0F-F8-78 192.168.0.126 01:09:16 //27 ESP_0FF634 60-01-94-0F-F6-34 192.168.0.127 01:09:16 //28 ESP_108C51 60-01-94-10-8C-51 192.168.0.128 01:09:16 //29 ESP_0E7620 60-01-94-0E-76-20 192.168.0.129 01:40:29 //30 ESP_107CA2 60-01-94-10-7C-A2 192.168.0.130 01:09:16 //31 ESP_108167 60-01-94-10-81-67 192.168.0.131 01:09:16 //32 ESP_10818B 60-01-94-10-81-8B 192.168.0.132 01:09:16 //33 ESP_0E7563 60-01-94-0E-75-63 192.168.0.133 01:40:14 //34 ESP_107FB3 60-01-94-10-7F-B3 /////////////////////////////////////////////////////////////////////////////////////////////////// 
/////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /* test.html Socket Methods */ /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// function setModeRawListener(socket){ socket.on('setModeRaw',function(dataRaw){ console.log(dataRaw); var data64 = base64js.fromByteArray(new Uint8Array([dataRaw])); console.log(data64); //socket.broadcast.to('stations').emit('setMode',data64); - these didn't work //io.sockets.to('stations').emit('setMode',data64); - these didn't work io.sockets.emit('setMode',data64); }); } function checkForUpdatesListener(socket){ socket.on('checkForUpdates',function(){ io.sockets.emit('checkForUpdate',""); }); } function setFiveHueColorsListener(socket){ socket.on('setFiveHueColors',function(){ fiveColors = new Array(5).fill(new CRGB(0,0,0)); hue = (hue+10)%hueBase; console.log(hue); var t_hue = hue; t_hue = (hue+10)%hueBase; fiveColors[0] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+20)%hueBase; fiveColors[1] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+30)%hueBase; fiveColors[2] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+40)%hueBase; fiveColors[3] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+50)%hueBase; fiveColors[4] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); var dataBuffer = new Uint8Array([ fiveColors[0].r,fiveColors[0].g,fiveColors[0].b, fiveColors[1].r,fiveColors[1].g,fiveColors[1].b, fiveColors[2].r,fiveColors[2].g,fiveColors[2].b, fiveColors[3].r,fiveColors[3].g,fiveColors[3].b, fiveColors[4].r,fiveColors[4].g,fiveColors[4].b ]); var data64 = base64js.fromByteArray(dataBuffer); io.sockets.emit('setFives',data64); }); } function 
clearColorsListener(socket){ socket.on('clearColors',function(){{ console.log('clear colors'); io.sockets.emit('clear',""); }}); } /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /* SYNC Data */ /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// //var uiSettings = { // 'lastElementChanged':'test', // 'elements':{ // 'test':'value' // } //}; // //function syncUISettings(socket){ // socket.on('syncUISettings',function(data){ // uiSettings['elements'][Object.keys(data)[0]] = data[Object.keys(data)[0]];;// = merge.recursive(true,data,uiSettings['elements']); // // console.log(Object.keys(data)[0]); // uiSettings['lastElementChanged'] = Object.keys(data)[0]; //assumes only one pair // //console.log(uiSettings['lastElementChanged']); // // //broadcast the updates and change for other clients to process // socket.broadcast.emit('syncUISettings',data); // }); //} //var modelData = { // //'0':[0,0,0,4,0,0,127] // //'1':[x,y,z,s,r,g,b] //}; // //function syncModelData(socket){ // socket.on('syncModelData',function(data){ // Object.keys(data).forEach(function(key) { // modelData[key] = data[key]; // }); // //broadcast the updates and change for other clients to process // socket.broadcast.emit('syncModelData',data); // }); //} // //var systemVariables = { // 'availableId':0 //}; // //function syncSystemVariables(socket){ // socket.on('syncSystemVariables',function(data){ // Object.keys(data).forEach(function(key) { // systemVariables[key] = data[key]; // }); // //broadcast the updates 
and change for other clients to process // socket.broadcast.emit('syncSystemVariables',data); // }); //} /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /* UDP */ /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// udpSocket.on('error', (err) =>{ console.log('udp socket error:\n${err.stack}'); console.log('closing the udp socket'); udpServer.close(); //TODO add function to restart the udp socket }); udpSocket.on('message', (msg, rinfo) => { console.log('udp socket got: ',msg,' from ',rinfo.address,':',rinfo.port); parseData(msg); }); udpSocket.on('listening', () => { const address = udpSocket.address(); console.log('udp socket listening ',address.address,':',address.port); }); udpSocket.bind(udpPortRecv, () => {}); //udpSocket.bind(udpPortSend, function(){ // udpSocket.setBroadcast(true); // udpSocket.setMulticastTTL(128); // udpSocket.addMembership(udpMulticastIP); //}); /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /* Socket.IO */ /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// 
/////////////////////////////////////////////////////////////////////////////////////////////////// var clientSockets = {}; //node id //socket io id //ip address //udp send port: 60000 io.on('connection',function(socket){ console.log("client "+socket['id']+" connected"); ClearAll(); stationDataListener(socket); socket.on('subscribe',function(roomName){ socket.join(roomName); console.log("client "+socket['id']+" joined room "+roomName); console.dir(socket.handshake.address); var tAddress = socket.handshake.address; var idx = tAddress.replace(/^.*:/,''); //chop down ipv6 to ipv4 console.log(idx); clientSockets[socket['id']] = { 'iosocket':socket['id'], 'ipaddress':idx } socket.on('disconnect',function(){ for(var key in clientSockets){ if(clientSockets[key]['iosocket']==socket['id']){ delete clientSockets[key]; } } }); if(roomName=='stations'){ CheckForUpdate(roomName); } if(roomName=='browsers'){ socket.emit('syncStationData',stationData); setStationIdListener(socket); setStationModeListener(socket); pingStationListener(socket); checkForUpdateListener(socket); setModeRawListener(socket); checkForUpdatesListener(socket); setFiveHueColorsListener(socket); clearColorsListener(socket); } }); }); const STATION_COUNT = 36; const LED_CLUSTER_COUNT = 45; // actual LED count is 45*3 = 135 //the locally stored color array for each station var colors = new Array(STATION_COUNT).fill(new Array(LED_CLUSTER_COUNT).fill(new CRGB(0,0,0))); /* * prototype function for CRGB data to mimic the FastLED library */ function CRGB(red, green, blue){ this.r = red; this.g = green; this.b = blue; this.int = function(){ return r>>16+g>>8+b>>0; } this.setRGB = function(red, green, blue){ this.r = red; this.g = green; this.b = blue; } this.setHSV = function(hue, sat, val){ var temp = HSVtoRGB(hue,sat,val); this.r = temp.r; this.g = temp.g; this.b = temp.b; } } /* * Turn off LEDs on a specific station */ function Clear(stationId){ io.sockets.to(stationId).emit('clear',''); } /* * Turn off LEDs on 
all stations */ function ClearAll(){ io.sockets.to('stations').emit('clear',''); } /* * mimics FastLED FillSolid method */ function FillSolid(stationId,startIndex,numToFill,ledColor){ if(!isNaN(stationId)){ //update server's copy of the LED cluster state for(var i=startIndex;i<startIndex+numToFill;i++){ colors[stationId][i].r = ledColor.r; colors[stationId][i].g = ledColor.g; colors[stationId][i].b = ledColor.b; } } var dataBuffer = new Uint8Array([startIndex,numToFill,ledColor.r,ledColor.g,ledColor.b]); io.sockets.to(stationId).emit('fillSolid',dataBuffer); } /* * Set a single LED color on a specific station */ function SetColor(stationId,startIndex,ledColor){ if(!isNaN(stationId)){ //update server's copy of the LED custer state colors[stationId][startIndex].r = ledColor.r; colors[stationId][startIndex].g = ledColor.g; colors[stationId][startIndex].b = ledColor.b; } var dataBuffer = new Uint8Array([startIndex,ledColor.r,ledColor.g,ledColor.b]); io.sockets.to(stationId).emit('setColor',dataBuffer); } /* * Send a set of different colors to a subset of a specific station * Could be the whole station * the number of leds is computed by the lenth of the colorArray * colorArray is an array of CRGB */ function SetColors(stationId,startIndex,colorArray){ //update server's copy of the LED custer state colors[stationId][ledIndex].r = ledColor.r; colors[stationId][ledIndex].g = ledColor.g; colors[stationId][ledIndex].b = ledColor.b; var dataBuffer = new Uint8Array([ledIndex,numToFill,ledColor.r,ledColor.g,ledColor.b]); io.sockets.to(stationId).emit('setColors',dataBuffer); } /* * GOL Station 5 segment code */ function SetFiveColors(stationId,fiveColorArray){ if(!isNaN(stationId)){ //update server's copy of the LED custer state if(stationId<STATION_COUNT){ for(var i=0;i<5;i++){ for(var j=0;j<LED_CLUSTER_COUNT/5;j++){ colors[stationId][i*9+j].r = fiveColorArray[i].r; colors[stationId][i*9+j].g = fiveColorArray[i].g; colors[stationId][i*9+j].b = fiveColorArray[i].b; } } } } // 
var dataArrayBuffer = new ArrayBuffer(15); // var dataBuffer = new Uint8Array(dataArrayBuffer); // dataArrayBuffer[0]=fiveColorArray[0].r; // dataArrayBuffer[1]=fiveColorArray[0].g; // dataArrayBuffer[2]=fiveColorArray[0].b; // // dataArrayBuffer[3]=fiveColorArray[1].r; // dataArrayBuffer[4]=fiveColorArray[1].g; // dataArrayBuffer[5]=fiveColorArray[1].b; // // dataArrayBuffer[6]=fiveColorArray[2].r; // dataArrayBuffer[7]=fiveColorArray[2].g; // dataArrayBuffer[8]=fiveColorArray[2].b; // // dataArrayBuffer[9]=fiveColorArray[3].r; // dataArrayBuffer[10]=fiveColorArray[3].g; // dataArrayBuffer[11]=fiveColorArray[3].b; // // dataArrayBuffer[12]=fiveColorArray[4].r; // dataArrayBuffer[13]=fiveColorArray[4].g; // dataArrayBuffer[14]=fiveColorArray[4].b; var dataBuffer2 = new Uint8Array([ fiveColorArray[0].r,fiveColorArray[0].g,fiveColorArray[0].b, fiveColorArray[1].r,fiveColorArray[1].g,fiveColorArray[1].b, fiveColorArray[2].r,fiveColorArray[2].g,fiveColorArray[2].b, fiveColorArray[3].r,fiveColorArray[3].g,fiveColorArray[3].b, fiveColorArray[4].r,fiveColorArray[4].g,fiveColorArray[4].b ]); io.sockets.to(stationId).emit('setFives',base64js.fromByteArray(dataBuffer2)); //console.log(base64js.fromByteArray(dataBuffer2)); } /* * Set the colors on the stations to the colors stored on the server * typially used on startup to retrieve the last stored state of the system */ function SyncColorsFromServer(){ for(var i=0;i<STATION_COUNT;i++){ SetColors(i,0,colors[i]); } } /* * sets entire strip colors at once * typically used for loading last saved state * or used for more efficient devliery of complete color changes */ function SetStrip(stationId,colorArray){ } /* * Force clients to check for firmware updates */ function CheckForUpdate(stationId){ io.sockets.to(stationId).emit('checkForUpdate',""); } /* * Helper method for HSV color model */ function HSVtoRGB(h, s, v) { var r, g, b, i, f, p, q, t; if (arguments.length === 1) { s = h.s, v = h.v, h = h.h; } i = Math.floor(h * 6); f 
= h * 6 - i; p = v * (1 - s); q = v * (1 - f * s); t = v * (1 - (1 - f) * s); switch (i % 6) { case 0: r = v, g = t, b = p; break; case 1: r = q, g = v, b = p; break; case 2: r = p, g = v, b = t; break; case 3: r = p, g = q, b = v; break; case 4: r = t, g = p, b = v; break; case 5: r = v, g = p, b = q; break; } return { r: Math.round(r * 255), g: Math.round(g * 255), b: Math.round(b * 255) }; } /* * Rainbow test code below here */ var hue = 120; var hueBase = 360; var colorString = ''; var fiveColors = new Array(5).fill(new CRGB(0,0,0)); var doStuff1 = function(){ hue = (hue+6)%hueBase; var t_hue = hue; for(var i=0;i<STATION_COUNT;i++){ t_hue = (hue+1)%hueBase; fiveColors[0] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+2)%hueBase; fiveColors[1] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+3)%hueBase; fiveColors[2] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+4)%hueBase; fiveColors[3] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+5)%hueBase; fiveColors[4] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); SetFiveColors(i,fiveColors); } fiveColors[0] = HSVtoRGB(0.0,1.0,1.0); fiveColors[1] = HSVtoRGB(0.0,1.0,1.0); fiveColors[2] = HSVtoRGB(0.0,1.0,1.0); fiveColors[3] = HSVtoRGB(0.0,1.0,1.0); fiveColors[4] = HSVtoRGB(0.0,1.0,1.0); SetFiveColors(24932,fiveColors); console.log(fiveColors[0]); }; //setInterval(doStuff1,5000); var doStuff2 = function(){ hue = (hue+1)%hueBase; var hue2 = (Math.floor(hue/6)*6)%hueBase; fiveColors[0] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[1] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[2] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[3] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[4] = HSVtoRGB(hue2/(hueBase),1.0,1.0); SetFiveColors('allStations',fiveColors); console.log(fiveColors[0]); //FillSolid('allStations',0,LED_CLUSTER_COUNT,HSVtoRGB(hue/(hueBase),1.0,1.0)); } //setInterval(doStuff2,10000/60); function sendUDPSocket(msg,nodeIdString){ //console.log('sending UDP Socket'); if(clientSockets.hasOwnProperty(nodeIdString)){ 
//console.log('sending to '+nodeIdString+' at '+clientSockets[nodeIdString]['ipaddress']); udpSocket.send(msg,0,msg.length,udpPortSend,clientSockets[nodeIdString]['ipaddress']); } } function udpSendColors(){ var msg = new Buffer.from([ fiveColors[0].r, fiveColors[0].g, fiveColors[0].b, fiveColors[1].r, fiveColors[1].g, fiveColors[1].b, fiveColors[2].r, fiveColors[2].g, fiveColors[2].b, fiveColors[3].r, fiveColors[3].g, fiveColors[3].b, fiveColors[4].r, fiveColors[4].g, fiveColors[4].b] ); sendUDPSocket(msg,'7'); } var doStuff3 = function(){ hue = (hue+1)%hueBase; var hue2 = (Math.floor(hue/6)*6)%hueBase; fiveColors[0] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[1] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[2] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[3] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[4] = HSVtoRGB(hue2/(hueBase),1.0,1.0); SetFiveColors('stations',fiveColors); console.log(fiveColors[0]); //FillSolid('allStations',0,LED_CLUSTER_COUNT,HSVtoRGB(hue/(hueBase),1.0,1.0)); udpSendColors(); } var doStuff4 = function(){ fiveColors = new Array(5).fill(new CRGB(0,0,0)); hue = (hue+10)%hueBase; console.log(hue); var t_hue = hue; t_hue = (hue+1)%hueBase; fiveColors[0] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+20)%hueBase; fiveColors[1] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+30)%hueBase; fiveColors[2] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+40)%hueBase; fiveColors[3] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+50)%hueBase; fiveColors[4] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); var dataBuffer = new Uint8Array([ fiveColors[0].r,fiveColors[0].g,fiveColors[0].b, fiveColors[1].r,fiveColors[1].g,fiveColors[1].b, fiveColors[2].r,fiveColors[2].g,fiveColors[2].b, fiveColors[3].r,fiveColors[3].g,fiveColors[3].b, fiveColors[4].r,fiveColors[4].g,fiveColors[4].b ]); var data64 = base64js.fromByteArray(dataBuffer); io.sockets.emit('setFives',data64); } //setInterval(doStuff2,100000/60); //setInterval(doStuff4,1000/30); 
setInterval(CheckForUpdate,100000);
golserver/server.js
'use strict'; const baseVersion = 3005; const express = require('express'); const app = express(); const server = require('http').createServer(app); const io = require('socket.io')(server); const color = require("rgb"); const base64js = require('base64-js'); const path = require('path'); const fs = require('fs'); const md5 = require('md5-file'); const dgram = require('dgram'); const udpSocket = dgram.createSocket('udp4'); var udpPortSend = 60000; //var udpMulticastIP = '230.185.192.109'; var udpPortRecv = 60001; var udpDestIP = '192.168.1.199'; // host everything in the public folder app.use(express.static(__dirname + '/public')); //check github.com/esp8266/Arduino/issues/2228 for example app.get('/update/base',function(req,res){ //check version somehow console.log('a device is requesting an update'); console.dir(req.headers); if(parseInt(req.headers['x-esp8266-version'])!=baseVersion){ //could be < var full_path = path.join(__dirname,'/bin/base'+baseVersion+'.bin'); fs.readFile(full_path,"binary",function(err,file){ if(err){ console.log('error uploading new firmware'); res.writeHeader(500, {"Content-Type": "text/plain"}); res.write(err + "\n"); res.end(); } else{ console.log('uploading new firmware'); res.writeHeader(200, {"Content-Type": "application/octect-stream", "Content-Disposition": "attachment;filename="+path.basename(full_path), "Content-Length": ""+fs.statSync(full_path)["size"], "x-MD5": md5.sync(full_path)}); res.write(file, "binary"); res.end(); } }); } else{ console.log('not uploading new firmware'); res.writeHeader(304, {"Content-Type": "text/plain"}); res.write("304 Not Modified\n"); res.end(); } }); var port = 80; server.listen(port); /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /* Station 
Management */ /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// var stationData = { '60-01-94-10-89-E5':{ 'online':false, 'id':'2', 'mode':0, 'mac':'60-01-94-10-89-E5', 'ip':'192.168.0.102', 'name':'ESP_1089E5', 'socket':'sdfghj' } }; //var operationModes = { // 0:'StrandTest', // 1:'SlaveListen', // 2:'CapSenseControl', // 3:'StrandTest2', // 4:'StrandTest3' //} function loadStationData(){ var dataFile = fs.readFileSync('./data/stations.json'); stationData = JSON.parse(dataFile); } loadStationData(); //get stationData on server start function saveStationData(){ var dataJSON = JSON.stringify(stationData); fs.writeFileSync('./data/stations.json',dataJSON); } // removes all data entries function clearStationData(){ Object.keys(stationData).forEach(function(key) { delete stationData[key]; }); } // sets all data entries' online to false // run at start function resetStationData(){ Object.keys(stationData).forEach(function(key) { stationData[key]['online'] = false; }); } function updateStationData(data){ Object.keys(data).forEach(function(key) { Object.assign(stationData[key],data[key]); //merge data }); io.sockets.emit('syncStationData',stationData); } function stationDataListener(socket){ socket.on('idhostnameipmac',function(data){ //TODO: parse the pieces and keep in a managed list console.log('idhostnameipmac'); console.log(data); //parse and add to managed list var aData = data.split(","); // stationId_str + "," // + String(WiFi.hostname()) + "," // + String(WiFi.localIP()) + "," // + String(mac[5], HEX) + "," // + String(mac[4], HEX) + "," // + String(mac[3], HEX) + "," // + String(mac[2], HEX) + "," // + String(mac[1], HEX) + "," // + String(mac[0], HEX)).c_str() var mData = 
aData[8]+"-"+aData[7]+"-"+aData[6]+"-"+aData[5]+"-"+aData[4]+"-"+aData[3]; var sData = { [mData]:{ 'online':true, 'id':aData[0], 'mode':0, 'mac':mData, 'ip':aData[2], 'name':aData[1], 'socket':socket['id'] 'firmware':"unknown" } } console.log("sData"); console.log(sData); updateStationData(sData) }); } //changes the id value of a number function setStationIdListener(socket){ socket.on('setStationId',function(data){ if( !isNaN(data['stationId'])){ if( Number.parseInt(data['stationId'])){ console.log('setting mac '+data['mac']+' to station '+data['stationId']); io.to([stationData[data['mac']]['socket']]).emit('setStationId',base64js.fromByteArray(new Uint16Array([data['stationId']]))); //update station data updateStationData({ [data['mac']]:{ 'id':data['stationId'] } }); } else{ console.log('stationId not an integer: '+data); } }else{ console.log('data issue: '+data); console.dir(data); } }) } function setStationModeListener(socket){ socket.on('setStationMode',function(data){ var data64 = base64js.fromByteArray(new Uint8Array([data['modeId']])); io.to([stationData[data['mac']]['socket']]).emit(data64); updateStationData({ [data['mac']]:{ 'mode':data['modeId'] } }); }); } function pingStationListener(socket){ socket.on('pingStation',function(data){ //var dataBuffer = new Uint8Array([startIndex,numToFill,ledColor.r,ledColor.g,ledColor.b]); var dataBuffer = new Uint8Array([0,45,0,255,0]); //all green io.to([stationData[data['mac']]['socket']]).emit('fillSolid',base64js.fromByteArray(dataBuffer)); }); } function checkForUpdateListener(socket){ socket.on('checkForUpdate',function(data){ io.to([stationData[data['mac']]['socket']]).emit('checkForUpdate',""); }); } //var configFile = fs.readFileSync('./databaseTest.json'); //var config = JSON.parse(configFile); //config.push({"totalTapScore": totTap, "tileId": tileId, "timeStamp": time}); ////console.log(' db now is, ', config) //var configJSON = JSON.stringify(config); //fs.writeFileSync('./databaseTest.json', configJSON); 
//stations = [ // ESP_1089E5 60-01-94-10-89-E5 192.168.0.102 01:09:16 //3 ESP_0FF841 60-01-94-0F-F8-41 192.168.0.103 01:09:18 //4 ESP_108363 60-01-94-10-83-63 192.168.0.104 01:09:16 //5 ESP_108507 60-01-94-10-85-07 192.168.0.105 01:40:14 //6 ESP_0FF49E 60-01-94-0F-F4-9E 192.168.0.106 01:09:16 //7 ESP_108BE2 60-01-94-10-8B-E2 192.168.0.107 01:09:16 //8 ESP_0FF846 60-01-94-0F-F8-46 192.168.0.108 01:09:16 //9 ESP_0E7780 60-01-94-0E-77-80 192.168.0.109 01:09:15 //10 ESP_1083B1 60-01-94-10-83-B1 192.168.0.110 01:09:15 //11 ESP_108B42 60-01-94-10-8B-42 192.168.0.111 01:09:16 //12 ESP_0E7772 60-01-94-0E-77-72 192.168.0.112 01:09:20 //13 ESP_108446 60-01-94-10-84-46 192.168.0.113 01:40:16 //14 ESP_107D19 60-01-94-10-7D-19 192.168.0.114 01:09:16 //15 ESP_0E75FE 60-01-94-0E-75-FE 192.168.0.115 01:09:16 //16 ESP_0E777D 60-01-94-0E-77-7D 192.168.0.116 01:09:16 //17 ESP_10835B 60-01-94-10-83-5B 192.168.0.117 01:09:16 //18 ESP_0FF684 60-01-94-0F-F6-84 192.168.0.118 01:09:16 //19 ESP_1080FB 60-01-94-10-80-FB 192.168.0.119 01:09:16 //20 ESP_0FF4BB 60-01-94-0F-F4-BB 192.168.0.120 01:09:16 //21 ESP_0E7545 60-01-94-0E-75-45 192.168.0.121 01:40:14 //22 ESP_1083C7 60-01-94-10-83-C7 192.168.0.122 01:09:16 //23 ESP_107EC9 60-01-94-10-7E-C9 192.168.0.123 01:09:16 //24 ESP_0FF398 60-01-94-0F-F3-98 192.168.0.124 00:38:29 //25 ESP_108A60 60-01-94-10-8A-60 192.168.0.125 01:38:09 //26 ESP_0FF878 60-01-94-0F-F8-78 192.168.0.126 01:09:16 //27 ESP_0FF634 60-01-94-0F-F6-34 192.168.0.127 01:09:16 //28 ESP_108C51 60-01-94-10-8C-51 192.168.0.128 01:09:16 //29 ESP_0E7620 60-01-94-0E-76-20 192.168.0.129 01:40:29 //30 ESP_107CA2 60-01-94-10-7C-A2 192.168.0.130 01:09:16 //31 ESP_108167 60-01-94-10-81-67 192.168.0.131 01:09:16 //32 ESP_10818B 60-01-94-10-81-8B 192.168.0.132 01:09:16 //33 ESP_0E7563 60-01-94-0E-75-63 192.168.0.133 01:40:14 //34 ESP_107FB3 60-01-94-10-7F-B3 /////////////////////////////////////////////////////////////////////////////////////////////////// 
/////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /* test.html Socket Methods */ /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// function setModeRawListener(socket){ socket.on('setModeRaw',function(dataRaw){ console.log(dataRaw); var data64 = base64js.fromByteArray(new Uint8Array([dataRaw])); console.log(data64); //socket.broadcast.to('stations').emit('setMode',data64); - these didn't work //io.sockets.to('stations').emit('setMode',data64); - these didn't work io.sockets.emit('setMode',data64); }); } function checkForUpdatesListener(socket){ socket.on('checkForUpdates',function(){ io.sockets.emit('checkForUpdate',""); }); } function setFiveHueColorsListener(socket){ socket.on('setFiveHueColors',function(){ fiveColors = new Array(5).fill(new CRGB(0,0,0)); hue = (hue+10)%hueBase; console.log(hue); var t_hue = hue; t_hue = (hue+10)%hueBase; fiveColors[0] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+20)%hueBase; fiveColors[1] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+30)%hueBase; fiveColors[2] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+40)%hueBase; fiveColors[3] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+50)%hueBase; fiveColors[4] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); var dataBuffer = new Uint8Array([ fiveColors[0].r,fiveColors[0].g,fiveColors[0].b, fiveColors[1].r,fiveColors[1].g,fiveColors[1].b, fiveColors[2].r,fiveColors[2].g,fiveColors[2].b, fiveColors[3].r,fiveColors[3].g,fiveColors[3].b, fiveColors[4].r,fiveColors[4].g,fiveColors[4].b ]); var data64 = base64js.fromByteArray(dataBuffer); io.sockets.emit('setFives',data64); }); } function 
clearColorsListener(socket){ socket.on('clearColors',function(){{ console.log('clear colors'); io.sockets.emit('clear',""); }}); } /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /* SYNC Data */ /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// //var uiSettings = { // 'lastElementChanged':'test', // 'elements':{ // 'test':'value' // } //}; // //function syncUISettings(socket){ // socket.on('syncUISettings',function(data){ // uiSettings['elements'][Object.keys(data)[0]] = data[Object.keys(data)[0]];;// = merge.recursive(true,data,uiSettings['elements']); // // console.log(Object.keys(data)[0]); // uiSettings['lastElementChanged'] = Object.keys(data)[0]; //assumes only one pair // //console.log(uiSettings['lastElementChanged']); // // //broadcast the updates and change for other clients to process // socket.broadcast.emit('syncUISettings',data); // }); //} //var modelData = { // //'0':[0,0,0,4,0,0,127] // //'1':[x,y,z,s,r,g,b] //}; // //function syncModelData(socket){ // socket.on('syncModelData',function(data){ // Object.keys(data).forEach(function(key) { // modelData[key] = data[key]; // }); // //broadcast the updates and change for other clients to process // socket.broadcast.emit('syncModelData',data); // }); //} // //var systemVariables = { // 'availableId':0 //}; // //function syncSystemVariables(socket){ // socket.on('syncSystemVariables',function(data){ // Object.keys(data).forEach(function(key) { // systemVariables[key] = data[key]; // }); // //broadcast the updates 
and change for other clients to process // socket.broadcast.emit('syncSystemVariables',data); // }); //} /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /* UDP */ /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// udpSocket.on('error', (err) =>{ console.log('udp socket error:\n${err.stack}'); console.log('closing the udp socket'); udpServer.close(); //TODO add function to restart the udp socket }); udpSocket.on('message', (msg, rinfo) => { console.log('udp socket got: ',msg,' from ',rinfo.address,':',rinfo.port); parseData(msg); }); udpSocket.on('listening', () => { const address = udpSocket.address(); console.log('udp socket listening ',address.address,':',address.port); }); udpSocket.bind(udpPortRecv, () => {}); //udpSocket.bind(udpPortSend, function(){ // udpSocket.setBroadcast(true); // udpSocket.setMulticastTTL(128); // udpSocket.addMembership(udpMulticastIP); //}); /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// /* Socket.IO */ /////////////////////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////// 
/////////////////////////////////////////////////////////////////////////////////////////////////// var clientSockets = {}; //node id //socket io id //ip address //udp send port: 60000 io.on('connection',function(socket){ console.log("client "+socket['id']+" connected"); ClearAll(); stationDataListener(socket); socket.on('subscribe',function(roomName){ socket.join(roomName); console.log("client "+socket['id']+" joined room "+roomName); console.dir(socket.handshake.address); var tAddress = socket.handshake.address; var idx = tAddress.replace(/^.*:/,''); //chop down ipv6 to ipv4 console.log(idx); clientSockets[socket['id']] = { 'iosocket':socket['id'], 'ipaddress':idx } socket.on('disconnect',function(){ for(var key in clientSockets){ if(clientSockets[key]['iosocket']==socket['id']){ delete clientSockets[key]; } } }); if(roomName=='stations'){ CheckForUpdate(roomName); } if(roomName=='browsers'){ socket.emit('syncStationData',stationData); setStationIdListener(socket); setStationModeListener(socket); pingStationListener(socket); checkForUpdateListener(socket); setModeRawListener(socket); checkForUpdatesListener(socket); setFiveHueColorsListener(socket); clearColorsListener(socket); } }); }); const STATION_COUNT = 36; const LED_CLUSTER_COUNT = 45; // actual LED count is 45*3 = 135 //the locally stored color array for each station var colors = new Array(STATION_COUNT).fill(new Array(LED_CLUSTER_COUNT).fill(new CRGB(0,0,0))); /* * prototype function for CRGB data to mimic the FastLED library */ function CRGB(red, green, blue){ this.r = red; this.g = green; this.b = blue; this.int = function(){ return r>>16+g>>8+b>>0; } this.setRGB = function(red, green, blue){ this.r = red; this.g = green; this.b = blue; } this.setHSV = function(hue, sat, val){ var temp = HSVtoRGB(hue,sat,val); this.r = temp.r; this.g = temp.g; this.b = temp.b; } } /* * Turn off LEDs on a specific station */ function Clear(stationId){ io.sockets.to(stationId).emit('clear',''); } /* * Turn off LEDs on 
all stations */ function ClearAll(){ io.sockets.to('stations').emit('clear',''); } /* * mimics FastLED FillSolid method */ function FillSolid(stationId,startIndex,numToFill,ledColor){ if(!isNaN(stationId)){ //update server's copy of the LED cluster state for(var i=startIndex;i<startIndex+numToFill;i++){ colors[stationId][i].r = ledColor.r; colors[stationId][i].g = ledColor.g; colors[stationId][i].b = ledColor.b; } } var dataBuffer = new Uint8Array([startIndex,numToFill,ledColor.r,ledColor.g,ledColor.b]); io.sockets.to(stationId).emit('fillSolid',dataBuffer); } /* * Set a single LED color on a specific station */ function SetColor(stationId,startIndex,ledColor){ if(!isNaN(stationId)){ //update server's copy of the LED custer state colors[stationId][startIndex].r = ledColor.r; colors[stationId][startIndex].g = ledColor.g; colors[stationId][startIndex].b = ledColor.b; } var dataBuffer = new Uint8Array([startIndex,ledColor.r,ledColor.g,ledColor.b]); io.sockets.to(stationId).emit('setColor',dataBuffer); } /* * Send a set of different colors to a subset of a specific station * Could be the whole station * the number of leds is computed by the lenth of the colorArray * colorArray is an array of CRGB */ function SetColors(stationId,startIndex,colorArray){ //update server's copy of the LED custer state colors[stationId][ledIndex].r = ledColor.r; colors[stationId][ledIndex].g = ledColor.g; colors[stationId][ledIndex].b = ledColor.b; var dataBuffer = new Uint8Array([ledIndex,numToFill,ledColor.r,ledColor.g,ledColor.b]); io.sockets.to(stationId).emit('setColors',dataBuffer); } /* * GOL Station 5 segment code */ function SetFiveColors(stationId,fiveColorArray){ if(!isNaN(stationId)){ //update server's copy of the LED custer state if(stationId<STATION_COUNT){ for(var i=0;i<5;i++){ for(var j=0;j<LED_CLUSTER_COUNT/5;j++){ colors[stationId][i*9+j].r = fiveColorArray[i].r; colors[stationId][i*9+j].g = fiveColorArray[i].g; colors[stationId][i*9+j].b = fiveColorArray[i].b; } } } } // 
var dataArrayBuffer = new ArrayBuffer(15); // var dataBuffer = new Uint8Array(dataArrayBuffer); // dataArrayBuffer[0]=fiveColorArray[0].r; // dataArrayBuffer[1]=fiveColorArray[0].g; // dataArrayBuffer[2]=fiveColorArray[0].b; // // dataArrayBuffer[3]=fiveColorArray[1].r; // dataArrayBuffer[4]=fiveColorArray[1].g; // dataArrayBuffer[5]=fiveColorArray[1].b; // // dataArrayBuffer[6]=fiveColorArray[2].r; // dataArrayBuffer[7]=fiveColorArray[2].g; // dataArrayBuffer[8]=fiveColorArray[2].b; // // dataArrayBuffer[9]=fiveColorArray[3].r; // dataArrayBuffer[10]=fiveColorArray[3].g; // dataArrayBuffer[11]=fiveColorArray[3].b; // // dataArrayBuffer[12]=fiveColorArray[4].r; // dataArrayBuffer[13]=fiveColorArray[4].g; // dataArrayBuffer[14]=fiveColorArray[4].b; var dataBuffer2 = new Uint8Array([ fiveColorArray[0].r,fiveColorArray[0].g,fiveColorArray[0].b, fiveColorArray[1].r,fiveColorArray[1].g,fiveColorArray[1].b, fiveColorArray[2].r,fiveColorArray[2].g,fiveColorArray[2].b, fiveColorArray[3].r,fiveColorArray[3].g,fiveColorArray[3].b, fiveColorArray[4].r,fiveColorArray[4].g,fiveColorArray[4].b ]); io.sockets.to(stationId).emit('setFives',base64js.fromByteArray(dataBuffer2)); //console.log(base64js.fromByteArray(dataBuffer2)); } /* * Set the colors on the stations to the colors stored on the server * typially used on startup to retrieve the last stored state of the system */ function SyncColorsFromServer(){ for(var i=0;i<STATION_COUNT;i++){ SetColors(i,0,colors[i]); } } /* * sets entire strip colors at once * typically used for loading last saved state * or used for more efficient devliery of complete color changes */ function SetStrip(stationId,colorArray){ } /* * Force clients to check for firmware updates */ function CheckForUpdate(stationId){ io.sockets.to(stationId).emit('checkForUpdate',""); } /* * Helper method for HSV color model */ function HSVtoRGB(h, s, v) { var r, g, b, i, f, p, q, t; if (arguments.length === 1) { s = h.s, v = h.v, h = h.h; } i = Math.floor(h * 6); f 
= h * 6 - i; p = v * (1 - s); q = v * (1 - f * s); t = v * (1 - (1 - f) * s); switch (i % 6) { case 0: r = v, g = t, b = p; break; case 1: r = q, g = v, b = p; break; case 2: r = p, g = v, b = t; break; case 3: r = p, g = q, b = v; break; case 4: r = t, g = p, b = v; break; case 5: r = v, g = p, b = q; break; } return { r: Math.round(r * 255), g: Math.round(g * 255), b: Math.round(b * 255) }; } /* * Rainbow test code below here */ var hue = 120; var hueBase = 360; var colorString = ''; var fiveColors = new Array(5).fill(new CRGB(0,0,0)); var doStuff1 = function(){ hue = (hue+6)%hueBase; var t_hue = hue; for(var i=0;i<STATION_COUNT;i++){ t_hue = (hue+1)%hueBase; fiveColors[0] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+2)%hueBase; fiveColors[1] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+3)%hueBase; fiveColors[2] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+4)%hueBase; fiveColors[3] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+5)%hueBase; fiveColors[4] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); SetFiveColors(i,fiveColors); } fiveColors[0] = HSVtoRGB(0.0,1.0,1.0); fiveColors[1] = HSVtoRGB(0.0,1.0,1.0); fiveColors[2] = HSVtoRGB(0.0,1.0,1.0); fiveColors[3] = HSVtoRGB(0.0,1.0,1.0); fiveColors[4] = HSVtoRGB(0.0,1.0,1.0); SetFiveColors(24932,fiveColors); console.log(fiveColors[0]); }; //setInterval(doStuff1,5000); var doStuff2 = function(){ hue = (hue+1)%hueBase; var hue2 = (Math.floor(hue/6)*6)%hueBase; fiveColors[0] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[1] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[2] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[3] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[4] = HSVtoRGB(hue2/(hueBase),1.0,1.0); SetFiveColors('allStations',fiveColors); console.log(fiveColors[0]); //FillSolid('allStations',0,LED_CLUSTER_COUNT,HSVtoRGB(hue/(hueBase),1.0,1.0)); } //setInterval(doStuff2,10000/60); function sendUDPSocket(msg,nodeIdString){ //console.log('sending UDP Socket'); if(clientSockets.hasOwnProperty(nodeIdString)){ 
//console.log('sending to '+nodeIdString+' at '+clientSockets[nodeIdString]['ipaddress']); udpSocket.send(msg,0,msg.length,udpPortSend,clientSockets[nodeIdString]['ipaddress']); } } function udpSendColors(){ var msg = new Buffer.from([ fiveColors[0].r, fiveColors[0].g, fiveColors[0].b, fiveColors[1].r, fiveColors[1].g, fiveColors[1].b, fiveColors[2].r, fiveColors[2].g, fiveColors[2].b, fiveColors[3].r, fiveColors[3].g, fiveColors[3].b, fiveColors[4].r, fiveColors[4].g, fiveColors[4].b] ); sendUDPSocket(msg,'7'); } var doStuff3 = function(){ hue = (hue+1)%hueBase; var hue2 = (Math.floor(hue/6)*6)%hueBase; fiveColors[0] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[1] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[2] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[3] = HSVtoRGB(hue2/(hueBase),1.0,1.0); fiveColors[4] = HSVtoRGB(hue2/(hueBase),1.0,1.0); SetFiveColors('stations',fiveColors); console.log(fiveColors[0]); //FillSolid('allStations',0,LED_CLUSTER_COUNT,HSVtoRGB(hue/(hueBase),1.0,1.0)); udpSendColors(); } var doStuff4 = function(){ fiveColors = new Array(5).fill(new CRGB(0,0,0)); hue = (hue+10)%hueBase; console.log(hue); var t_hue = hue; t_hue = (hue+1)%hueBase; fiveColors[0] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+20)%hueBase; fiveColors[1] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+30)%hueBase; fiveColors[2] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+40)%hueBase; fiveColors[3] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); t_hue = (hue+50)%hueBase; fiveColors[4] = HSVtoRGB(t_hue/(hueBase),1.0,1.0); var dataBuffer = new Uint8Array([ fiveColors[0].r,fiveColors[0].g,fiveColors[0].b, fiveColors[1].r,fiveColors[1].g,fiveColors[1].b, fiveColors[2].r,fiveColors[2].g,fiveColors[2].b, fiveColors[3].r,fiveColors[3].g,fiveColors[3].b, fiveColors[4].r,fiveColors[4].g,fiveColors[4].b ]); var data64 = base64js.fromByteArray(dataBuffer); io.sockets.emit('setFives',data64); } //setInterval(doStuff2,100000/60); //setInterval(doStuff4,1000/30); 
setInterval(CheckForUpdate,100000);
added comma
golserver/server.js
added comma
<ide><path>olserver/server.js <ide> stationData = JSON.parse(dataFile); <ide> } <ide> loadStationData(); //get stationData on server start <add>resetStationData(); <ide> <ide> function saveStationData(){ <ide> var dataJSON = JSON.stringify(stationData); <ide> 'mac':mData, <ide> 'ip':aData[2], <ide> 'name':aData[1], <del> 'socket':socket['id'] <del> 'firmware':"unknown" <add> 'socket':socket['id'], <add> 'firmware':'unknown' <ide> } <ide> } <ide> console.log("sData");
Java
lgpl-2.1
ce548fcdcd66daccf627d6bf2c2f0d09c805b0bd
0
ggiudetti/opencms-core,sbonoc/opencms-core,gallardo/opencms-core,it-tavis/opencms-core,victos/opencms-core,alkacon/opencms-core,mediaworx/opencms-core,victos/opencms-core,alkacon/opencms-core,ggiudetti/opencms-core,alkacon/opencms-core,gallardo/opencms-core,gallardo/opencms-core,mediaworx/opencms-core,alkacon/opencms-core,victos/opencms-core,MenZil/opencms-core,victos/opencms-core,it-tavis/opencms-core,gallardo/opencms-core,sbonoc/opencms-core,sbonoc/opencms-core,MenZil/opencms-core,MenZil/opencms-core,it-tavis/opencms-core,ggiudetti/opencms-core,ggiudetti/opencms-core,sbonoc/opencms-core,mediaworx/opencms-core,it-tavis/opencms-core,mediaworx/opencms-core,MenZil/opencms-core
/* * This library is part of OpenCms - * the Open Source Content Management System * * Copyright (c) Alkacon Software GmbH (http://www.alkacon.com) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * For further information about Alkacon Software, please see the * company website: http://www.alkacon.com * * For further information about OpenCms, please see the * project website: http://www.opencms.org * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.opencms.gwt.client.ui.input; import org.opencms.gwt.client.CmsCoreProvider; import org.opencms.gwt.client.I_CmsHasInit; import org.opencms.gwt.client.ui.CmsPushButton; import org.opencms.gwt.client.ui.I_CmsAutoHider; import org.opencms.gwt.client.ui.css.I_CmsInputLayoutBundle; import org.opencms.gwt.client.ui.css.I_CmsLayoutBundle; import org.opencms.gwt.client.ui.input.form.CmsWidgetFactoryRegistry; import org.opencms.gwt.client.ui.input.form.I_CmsFormWidgetFactory; import org.opencms.gwt.shared.CmsLinkBean; import org.opencms.util.CmsStringUtil; import java.util.Map; import com.google.gwt.dom.client.Style.Cursor; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.event.dom.client.BlurEvent; import com.google.gwt.event.dom.client.BlurHandler; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import 
com.google.gwt.event.dom.client.MouseUpEvent; import com.google.gwt.event.dom.client.MouseUpHandler; import com.google.gwt.event.logical.shared.ValueChangeHandler; import com.google.gwt.event.shared.HandlerRegistration; import com.google.gwt.user.client.Command; import com.google.gwt.user.client.DOM; import com.google.gwt.user.client.Event; import com.google.gwt.user.client.Event.NativePreviewEvent; import com.google.gwt.user.client.Event.NativePreviewHandler; import com.google.gwt.user.client.ui.Composite; import com.google.gwt.user.client.ui.FlowPanel; import com.google.gwt.user.client.ui.Panel; import com.google.gwt.user.client.ui.SimplePanel; import com.google.gwt.user.client.ui.TextBox; /** * Basic gallery widget for forms.<p> * * @since 8.0.0 * */ public class CmsVfsSelection extends Composite implements I_CmsFormWidget, I_CmsHasInit { /** * Event preview handler.<p> * * To be used while popup open.<p> */ protected class CloseEventPreviewHandler implements NativePreviewHandler { /** * @see com.google.gwt.user.client.Event.NativePreviewHandler#onPreviewNativeEvent(com.google.gwt.user.client.Event.NativePreviewEvent) */ public void onPreviewNativeEvent(NativePreviewEvent event) { Event nativeEvent = Event.as(event.getNativeEvent()); switch (DOM.eventGetType(nativeEvent)) { case Event.ONMOUSEMOVE: break; case Event.ONMOUSEUP: break; case Event.ONMOUSEDOWN: break; case Event.ONKEYUP: if (m_textBox.getValue().length() > 0) { close(); } else { if (m_popup == null) { open(); } else if (m_popup.isShowing()) { close(); } else { open(); } } break; case Event.ONMOUSEWHEEL: close(); break; default: // do nothing } } } /** Inner class for the open button. */ protected class OpenButton extends CmsPushButton { /** * Default constructor.<p> * @param imageClass */ public OpenButton(String imageClass) { super(imageClass); setStyleName(I_CmsLayoutBundle.INSTANCE.buttonCss().openVfsButton()); } } /** The download mode of this widget. 
*/ public static final String DOWNLOAD = "download"; /** The downloadlink mode of this widget. */ public static final String DOWNLOAD_LINK = "download_link"; /** The filelink mode of this widget. */ public static final String FILE_LINK = "file_link"; /** The pricipal mode of this widget. */ public static final String PRINCIPAL = "principal"; /** The OrgUnit mode of this widget. */ public static final String ORGUNIT = "orgunit"; /** The html mode of this widget. */ public static final String HTML = "html"; /** The imagelink mode of this widget. */ public static final String IMAGE_LINK = "image_link"; /** The link mode of this widget. */ public static final String LINK = "link"; /** The table mode of this widget. */ public static final String TABLE = "table"; /** A counter used for giving text box widgets ids. */ private static int idCounter; /** The widget type identifier for this widget. */ private static final String WIDGET_TYPE = "vfsselection"; /** The fade panel. */ protected Panel m_fadePanel = new SimplePanel(); /** The old value. */ protected String m_oldValue = ""; /** The popup frame. */ protected CmsFramePopup m_popup; /** The handler registration. */ protected HandlerRegistration m_previewHandlerRegistration; /** The default rows set. */ int m_defaultRows; /** The root panel containing the other components of this widget. */ Panel m_panel = new FlowPanel(); /** The internal text area widget used by this widget. */ TextBox m_textBox; /** The container for the text area. */ FlowPanel m_textBoxContainer = new FlowPanel(); /** The configuration string. */ private String m_config; /** The error display for this widget. */ private CmsErrorWidget m_error = new CmsErrorWidget(); /***/ private String m_id; /** The button to to open the selection. 
*/ private OpenButton m_openSelection; /***/ private String m_type; /** * VsfSelection widget to open the gallery selection.<p> * */ public CmsVfsSelection() { super(); } /** * VsfSelection widget to open the gallery selection.<p> * @param iconImage the image of the icon shown in the * @param type the type of this widget * @param config the configuration for this widget */ public CmsVfsSelection(String iconImage, String type, String config) { super(); m_type = type; m_config = config; m_textBox = new TextBox(); m_id = "CmsVfsSelection_" + (idCounter++); m_textBox.getElement().setId(m_id); m_openSelection = new OpenButton(iconImage); m_textBoxContainer.add(m_openSelection); createFader(); initWidget(m_panel); m_panel.add(m_textBoxContainer); m_fadePanel.setStyleName(I_CmsInputLayoutBundle.INSTANCE.inputCss().vfsInputBoxFader()); m_fadePanel.getElement().getStyle().setRight(21, Unit.PX); m_fadePanel.getElement().getStyle().setCursor(Cursor.TEXT); m_fadePanel.getElement().getStyle().setBottom(7, Unit.PX); m_textBoxContainer.add(m_textBox); m_fadePanel.addDomHandler(new ClickHandler() { public void onClick(ClickEvent event) { m_textBox.setFocus(true); } }, ClickEvent.getType()); m_panel.add(m_error); m_textBoxContainer.addStyleName(I_CmsLayoutBundle.INSTANCE.generalCss().cornerAll()); m_textBox.addMouseUpHandler(new MouseUpHandler() { public void onMouseUp(MouseUpEvent event) { m_textBoxContainer.remove(m_fadePanel); setTitle(""); if (m_popup == null) { open(); } else if (m_popup.isShowing()) { close(); } else { open(); } } }); m_textBox.addBlurHandler(new BlurHandler() { public void onBlur(BlurEvent event) { if ((m_textBox.getValue().length() * 6.88) > m_textBox.getOffsetWidth()) { m_textBoxContainer.add(m_fadePanel); setTitle(m_textBox.getValue()); } } }); m_openSelection.addClickHandler(new ClickHandler() { public void onClick(ClickEvent event) { if (m_popup == null) { open(); } else if (m_popup.isShowing()) { close(); } else { open(); } } }); } /** * Initializes 
this class.<p> */ public static void initClass() { // registers a factory for creating new instances of this widget CmsWidgetFactoryRegistry.instance().registerFactory(WIDGET_TYPE, new I_CmsFormWidgetFactory() { /** * @see org.opencms.gwt.client.ui.input.form.I_CmsFormWidgetFactory#createWidget(java.util.Map) */ public I_CmsFormWidget createWidget(Map<String, String> widgetParams) { return new CmsVfsSelection(); } }); } /** * @param handler */ public void addValueChangeHandler(ValueChangeHandler<String> handler) { m_textBox.addValueChangeHandler(handler); } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#getApparentValue() */ public String getApparentValue() { return getFormValueAsString(); } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#getFieldType() */ public FieldType getFieldType() { return I_CmsFormWidget.FieldType.STRING; } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#getFormValue() */ public Object getFormValue() { if (m_textBox.getText() == null) { return ""; } return m_textBox.getValue(); } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#getFormValueAsString() */ public String getFormValueAsString() { return (String)getFormValue(); } /** * Returns the selected link as a bean.<p> * * @return the selected link as a bean */ public CmsLinkBean getLinkBean() { String link = m_textBox.getValue(); if (CmsStringUtil.isEmptyOrWhitespaceOnly(link)) { return null; } return new CmsLinkBean(m_textBox.getText(), true); } /** * Returns the text contained in the text area.<p> * * @return the text in the text area */ public String getText() { return m_textBox.getValue(); } /** * Returns the textarea of this widget.<p> * * @return the textarea */ public TextBox getTextArea() { return m_textBox; } /** * Returns the text box container of this widget.<p> * * @return the text box container */ public FlowPanel getTextAreaContainer() { return m_textBoxContainer; } /** * @see 
org.opencms.gwt.client.ui.input.I_CmsFormWidget#isEnabled() */ public boolean isEnabled() { return m_textBox.isEnabled(); } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#reset() */ public void reset() { m_textBox.setText(""); } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#setAutoHideParent(org.opencms.gwt.client.ui.I_CmsAutoHider) */ public void setAutoHideParent(I_CmsAutoHider autoHideParent) { // nothing to do } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#setEnabled(boolean) */ public void setEnabled(boolean enabled) { m_textBox.setEnabled(enabled); } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#setErrorMessage(java.lang.String) */ public void setErrorMessage(String errorMessage) { m_error.setText(errorMessage); } /** * Sets the value of the widget.<p> * * @param value the new value */ public void setFormValue(Object value) { if (value == null) { value = ""; } if (value instanceof String) { String strValue = (String)value; m_textBox.setText(strValue); createFader(); setTitle(strValue); } } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#setFormValueAsString(java.lang.String) */ public void setFormValueAsString(String newValue) { setFormValue(newValue); } /** * Sets the link from a bean.<p> * * @param link the link bean */ public void setLinkBean(CmsLinkBean link) { if (link == null) { link = new CmsLinkBean("", true); } m_textBox.setValue(link.getLink()); } /** * Sets the name of the input field.<p> * * @param name of the input field * */ public void setName(String name) { m_textBox.setName(name); } /** * Sets the text in the text area.<p> * * @param text the new text */ public void setText(String text) { m_textBox.setValue(text); } /** * @see com.google.gwt.user.client.ui.UIObject#setTitle(java.lang.String) */ @Override public void setTitle(String title) { if ((title.length() * 6.88) > m_panel.getOffsetWidth()) { m_textBox.getElement().setTitle(title); } else { 
m_textBox.getElement().setTitle(""); } } /** * Creates the URL for the gallery dialog IFrame.<p> * * @return the URL for the gallery dialog IFrame */ protected String buildGalleryUrl() { String basePath = ""; if (m_type.equals(LINK) || m_type.equals(HTML) || m_type.equals(TABLE) || m_type.equals(PRINCIPAL)) { if (m_type.equals(LINK)) { basePath = "/system/workplace/galleries/linkgallery/index.jsp?dialogmode=widget&fieldid=" + m_id; } else if (m_type.equals(HTML)) { basePath = "/system/workplace/galleries/htmlgallery/index.jsp?dialogmode=widget&fieldid=" + m_id; } else if (m_type.equals(TABLE)) { basePath = "/system/workplace/galleries/tablegallery/index.jsp?dialogmode=widget&fieldid=" + m_id; } else if (m_type.equals(PRINCIPAL)) { basePath = "/system/workplace/commons/principal_selection.jsp?dialogmode=widget&fieldid=" + m_id; } else { basePath = "/system/workplace/galleries/" + m_type + "gallery/index.jsp"; } } else { basePath = "/system/modules/org.opencms.ade.galleries/gallery.jsp"; basePath += "?dialogmode=widget&fieldid=" + m_id; } String pathparameter = m_textBox.getText(); if (pathparameter.indexOf("/") > -1) { basePath += "&currentelement=" + pathparameter; } basePath += m_config; //basePath += "&gwt.codesvr=127.0.0.1:9996"; //to start the hosted mode just remove commentary return CmsCoreProvider.get().link(basePath); } /** * Close the popup of this widget.<p> * */ protected void close() { m_popup.hideDelayed(); m_textBox.setFocus(true); m_textBox.setCursorPos(m_textBox.getText().length()); } /** * Opens the popup of this widget.<p> * */ protected void open() { m_oldValue = m_textBox.getValue(); if (m_popup == null) { String title = org.opencms.gwt.client.Messages.get().key( org.opencms.gwt.client.Messages.GUI_GALLERY_SELECT_DIALOG_TITLE_0); m_popup = new CmsFramePopup(title, buildGalleryUrl()); m_popup.setCloseHandler(new Runnable() { public void run() { String textboxValue = m_textBox.getText(); if (!m_oldValue.equals(textboxValue)) { 
m_textBox.setValue("", true); m_textBox.setValue(textboxValue, true); } if (m_previewHandlerRegistration != null) { m_previewHandlerRegistration.removeHandler(); m_previewHandlerRegistration = null; } m_textBox.setFocus(true); m_textBox.setCursorPos(m_textBox.getText().length()); } }); m_popup.setModal(false); m_popup.setId(m_id); m_popup.setWidth(717); if (m_type.equals(DOWNLOAD)) { m_popup.getFrame().setSize("705px", "640px"); } else if (m_type.equals(HTML)) { m_popup.getFrame().setSize("705px", "640px"); } else if (m_type.equals(LINK)) { m_popup.getFrame().setSize("705px", "640px"); } else if (m_type.equals(TABLE)) { m_popup.getFrame().setSize("705px", "640px"); } else if (m_type.equals(PRINCIPAL)) { m_popup.getFrame().setSize("705px", "320px"); } else { m_popup.getFrame().setSize("705px", "485px"); } m_popup.addDialogClose(new Command() { public void execute() { close(); } }); } else { m_popup.getFrame().setUrl(buildGalleryUrl()); } m_popup.setAutoHideEnabled(true); m_popup.center(); if (m_previewHandlerRegistration == null) { m_previewHandlerRegistration = Event.addNativePreviewHandler(new CloseEventPreviewHandler()); } } /** * Adds the fader if necessary.<p> * */ private void createFader() { if ((m_textBox.getValue().length() * 6.88) > m_textBox.getOffsetWidth()) { m_textBoxContainer.add(m_fadePanel); } } }
src-gwt/org/opencms/gwt/client/ui/input/CmsVfsSelection.java
/* * This library is part of OpenCms - * the Open Source Content Management System * * Copyright (c) Alkacon Software GmbH (http://www.alkacon.com) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * For further information about Alkacon Software, please see the * company website: http://www.alkacon.com * * For further information about OpenCms, please see the * project website: http://www.opencms.org * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.opencms.gwt.client.ui.input; import org.opencms.gwt.client.CmsCoreProvider; import org.opencms.gwt.client.I_CmsHasInit; import org.opencms.gwt.client.ui.CmsPushButton; import org.opencms.gwt.client.ui.I_CmsAutoHider; import org.opencms.gwt.client.ui.css.I_CmsInputLayoutBundle; import org.opencms.gwt.client.ui.css.I_CmsLayoutBundle; import org.opencms.gwt.client.ui.input.form.CmsWidgetFactoryRegistry; import org.opencms.gwt.client.ui.input.form.I_CmsFormWidgetFactory; import org.opencms.gwt.shared.CmsLinkBean; import org.opencms.util.CmsStringUtil; import java.util.Map; import com.google.gwt.dom.client.Style.Cursor; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.event.dom.client.BlurEvent; import com.google.gwt.event.dom.client.BlurHandler; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import 
com.google.gwt.event.dom.client.MouseUpEvent; import com.google.gwt.event.dom.client.MouseUpHandler; import com.google.gwt.event.logical.shared.ValueChangeHandler; import com.google.gwt.event.shared.HandlerRegistration; import com.google.gwt.user.client.Command; import com.google.gwt.user.client.DOM; import com.google.gwt.user.client.Event; import com.google.gwt.user.client.Event.NativePreviewEvent; import com.google.gwt.user.client.Event.NativePreviewHandler; import com.google.gwt.user.client.ui.Composite; import com.google.gwt.user.client.ui.FlowPanel; import com.google.gwt.user.client.ui.Panel; import com.google.gwt.user.client.ui.SimplePanel; import com.google.gwt.user.client.ui.TextBox; /** * Basic gallery widget for forms.<p> * * @since 8.0.0 * */ public class CmsVfsSelection extends Composite implements I_CmsFormWidget, I_CmsHasInit { /** * Event preview handler.<p> * * To be used while popup open.<p> */ protected class CloseEventPreviewHandler implements NativePreviewHandler { /** * @see com.google.gwt.user.client.Event.NativePreviewHandler#onPreviewNativeEvent(com.google.gwt.user.client.Event.NativePreviewEvent) */ public void onPreviewNativeEvent(NativePreviewEvent event) { Event nativeEvent = Event.as(event.getNativeEvent()); switch (DOM.eventGetType(nativeEvent)) { case Event.ONMOUSEMOVE: break; case Event.ONMOUSEUP: break; case Event.ONMOUSEDOWN: break; case Event.ONKEYUP: if (m_textBox.getValue().length() > 0) { close(); } else { if (m_popup == null) { open(); } else if (m_popup.isShowing()) { close(); } else { open(); } } break; case Event.ONMOUSEWHEEL: close(); break; default: // do nothing } } } /** Inner class for the open button. */ protected class OpenButton extends CmsPushButton { /** * Default constructor.<p> * @param imageClass */ public OpenButton(String imageClass) { super(imageClass); setStyleName(I_CmsLayoutBundle.INSTANCE.buttonCss().openVfsButton()); } } /** The download mode of this widget. 
*/ public static final String DOWNLOAD = "download"; /** The downloadlink mode of this widget. */ public static final String DOWNLOAD_LINK = "download_link"; /** The filelink mode of this widget. */ public static final String FILE_LINK = "file_link"; /** The pricipal mode of this widget. */ public static final String PRINCIPAL = "principal"; /** The OrgUnit mode of this widget. */ public static final String ORGUNIT = "orgunit"; /** The html mode of this widget. */ public static final String HTML = "html"; /** The imagelink mode of this widget. */ public static final String IMAGE_LINK = "image_link"; /** The link mode of this widget. */ public static final String LINK = "link"; /** The table mode of this widget. */ public static final String TABLE = "table"; /** A counter used for giving text box widgets ids. */ private static int idCounter; /** The widget type identifier for this widget. */ private static final String WIDGET_TYPE = "vfsselection"; /** The fade panel. */ protected Panel m_fadePanel = new SimplePanel(); /** The old value. */ protected String m_oldValue = ""; /** The popup frame. */ protected CmsFramePopup m_popup; /** The handler registration. */ protected HandlerRegistration m_previewHandlerRegistration; /** The x-coords of the popup. */ protected int m_xcoordspopup; /** The y-coords of the popup. */ protected int m_ycoordspopup; /** The default rows set. */ int m_defaultRows; /** The root panel containing the other components of this widget. */ Panel m_panel = new FlowPanel(); /** The internal text area widget used by this widget. */ TextBox m_textBox; /** The container for the text area. */ FlowPanel m_textBoxContainer = new FlowPanel(); /** The configuration string. */ private String m_config; /** The error display for this widget. */ private CmsErrorWidget m_error = new CmsErrorWidget(); /***/ private String m_id; /** The button to to open the selection. 
*/ private OpenButton m_openSelection; /***/ private String m_type; /** * VsfSelection widget to open the gallery selection.<p> * */ public CmsVfsSelection() { super(); } /** * VsfSelection widget to open the gallery selection.<p> * @param iconImage the image of the icon shown in the * @param type the type of this widget * @param config the configuration for this widget */ public CmsVfsSelection(String iconImage, String type, String config) { super(); m_type = type; m_config = config; m_textBox = new TextBox(); m_id = "CmsVfsSelection_" + (idCounter++); m_textBox.getElement().setId(m_id); m_openSelection = new OpenButton(iconImage); m_textBoxContainer.add(m_openSelection); creatFaider(); initWidget(m_panel); m_panel.add(m_textBoxContainer); m_fadePanel.setStyleName(I_CmsInputLayoutBundle.INSTANCE.inputCss().vfsInputBoxFader()); m_fadePanel.getElement().getStyle().setRight(21, Unit.PX); m_fadePanel.getElement().getStyle().setCursor(Cursor.TEXT); m_fadePanel.getElement().getStyle().setBottom(7, Unit.PX); m_textBoxContainer.add(m_textBox); m_fadePanel.addDomHandler(new ClickHandler() { public void onClick(ClickEvent event) { m_textBox.setFocus(true); } }, ClickEvent.getType()); m_panel.add(m_error); m_textBoxContainer.addStyleName(I_CmsLayoutBundle.INSTANCE.generalCss().cornerAll()); m_textBox.addMouseUpHandler(new MouseUpHandler() { public void onMouseUp(MouseUpEvent event) { m_textBoxContainer.remove(m_fadePanel); setTitle(""); if (m_popup == null) { open(); } else if (m_popup.isShowing()) { close(); } else { open(); } } }); m_textBox.addBlurHandler(new BlurHandler() { public void onBlur(BlurEvent event) { if ((m_textBox.getValue().length() * 6.88) > m_textBox.getOffsetWidth()) { m_textBoxContainer.add(m_fadePanel); setTitle(m_textBox.getValue()); } } }); m_openSelection.addClickHandler(new ClickHandler() { public void onClick(ClickEvent event) { if (m_popup == null) { open(); } else if (m_popup.isShowing()) { close(); } else { open(); } } }); } /** * Initializes 
this class.<p> */ public static void initClass() { // registers a factory for creating new instances of this widget CmsWidgetFactoryRegistry.instance().registerFactory(WIDGET_TYPE, new I_CmsFormWidgetFactory() { /** * @see org.opencms.gwt.client.ui.input.form.I_CmsFormWidgetFactory#createWidget(java.util.Map) */ public I_CmsFormWidget createWidget(Map<String, String> widgetParams) { return new CmsVfsSelection(); } }); } /** * @param handler */ public void addValueChangeHandler(ValueChangeHandler<String> handler) { m_textBox.addValueChangeHandler(handler); } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#getApparentValue() */ public String getApparentValue() { return getFormValueAsString(); } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#getFieldType() */ public FieldType getFieldType() { return I_CmsFormWidget.FieldType.STRING; } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#getFormValue() */ public Object getFormValue() { if (m_textBox.getText() == null) { return ""; } return m_textBox.getValue(); } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#getFormValueAsString() */ public String getFormValueAsString() { return (String)getFormValue(); } /** * Returns the selected link as a bean.<p> * * @return the selected link as a bean */ public CmsLinkBean getLinkBean() { String link = m_textBox.getValue(); if (CmsStringUtil.isEmptyOrWhitespaceOnly(link)) { return null; } return new CmsLinkBean(m_textBox.getText(), true); } /** * Returns the text contained in the text area.<p> * * @return the text in the text area */ public String getText() { return m_textBox.getValue(); } /** * Returns the textarea of this widget.<p> * * @return the textarea */ public TextBox getTextArea() { return m_textBox; } /** * Returns the text box container of this widget.<p> * * @return the text box container */ public FlowPanel getTextAreaContainer() { return m_textBoxContainer; } /** * @see 
org.opencms.gwt.client.ui.input.I_CmsFormWidget#isEnabled() */ public boolean isEnabled() { return m_textBox.isEnabled(); } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#reset() */ public void reset() { m_textBox.setText(""); } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#setAutoHideParent(org.opencms.gwt.client.ui.I_CmsAutoHider) */ public void setAutoHideParent(I_CmsAutoHider autoHideParent) { // nothing to do } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#setEnabled(boolean) */ public void setEnabled(boolean enabled) { m_textBox.setEnabled(enabled); } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#setErrorMessage(java.lang.String) */ public void setErrorMessage(String errorMessage) { m_error.setText(errorMessage); } /** * Sets the value of the widget.<p> * * @param value the new value */ public void setFormValue(Object value) { if (value == null) { value = ""; } if (value instanceof String) { String strValue = (String)value; m_textBox.setText(strValue); creatFaider(); setTitle(strValue); } } /** * @see org.opencms.gwt.client.ui.input.I_CmsFormWidget#setFormValueAsString(java.lang.String) */ public void setFormValueAsString(String newValue) { setFormValue(newValue); } /** * Sets the link from a bean.<p> * * @param link the link bean */ public void setLinkBean(CmsLinkBean link) { if (link == null) { link = new CmsLinkBean("", true); } m_textBox.setValue(link.getLink()); } /** * Sets the name of the input field.<p> * * @param name of the input field * */ public void setName(String name) { m_textBox.setName(name); } /** * Sets the text in the text area.<p> * * @param text the new text */ public void setText(String text) { m_textBox.setValue(text); } /** * @see com.google.gwt.user.client.ui.UIObject#setTitle(java.lang.String) */ @Override public void setTitle(String title) { if ((title.length() * 6.88) > m_panel.getOffsetWidth()) { m_textBox.getElement().setTitle(title); } else { 
m_textBox.getElement().setTitle(""); } } /** * Creates the URL for the gallery dialog IFrame.<p> * * @return the URL for the gallery dialog IFrame */ protected String buildGalleryUrl() { String basePath = ""; if (m_type.equals(LINK) || m_type.equals(HTML) || m_type.equals(TABLE) || m_type.equals(PRINCIPAL)) { if (m_type.equals(LINK)) { basePath = "/system/workplace/galleries/linkgallery/index.jsp?dialogmode=widget&fieldid=" + m_id; } else if (m_type.equals(HTML)) { basePath = "/system/workplace/galleries/htmlgallery/index.jsp?dialogmode=widget&fieldid=" + m_id; } else if (m_type.equals(TABLE)) { basePath = "/system/workplace/galleries/tablegallery/index.jsp?dialogmode=widget&fieldid=" + m_id; } else if (m_type.equals(PRINCIPAL)) { basePath = "/system/workplace/commons/principal_selection.jsp?dialogmode=widget&fieldid=" + m_id; } else { basePath = "/system/workplace/galleries/" + m_type + "gallery/index.jsp"; } } else { basePath = "/system/modules/org.opencms.ade.galleries/gallery.jsp"; basePath += "?dialogmode=widget&fieldid=" + m_id; } String pathparameter = m_textBox.getText(); if (pathparameter.indexOf("/") > -1) { basePath += "&currentelement=" + pathparameter; } basePath += m_config; //basePath += "&gwt.codesvr=127.0.0.1:9996"; //to start the hosted mode just remove commentary return CmsCoreProvider.get().link(basePath); } /** * Close the popup of this widget.<p> * */ protected void close() { m_popup.hideDelayed(); m_textBox.setFocus(true); m_textBox.setCursorPos(m_textBox.getText().length()); } /** * Opens the popup of this widget.<p> * */ protected void open() { m_oldValue = m_textBox.getValue(); if (m_popup == null) { String title = org.opencms.gwt.client.Messages.get().key( org.opencms.gwt.client.Messages.GUI_GALLERY_SELECT_DIALOG_TITLE_0); m_popup = new CmsFramePopup(title, buildGalleryUrl()); m_popup.setCloseHandler(new Runnable() { public void run() { String textboxValue = m_textBox.getText(); if (!m_oldValue.equals(textboxValue)) { 
m_textBox.setValue("", true); m_textBox.setValue(textboxValue, true); } if (m_previewHandlerRegistration != null) { m_previewHandlerRegistration.removeHandler(); m_previewHandlerRegistration = null; } m_textBox.setFocus(true); m_textBox.setCursorPos(m_textBox.getText().length()); } }); m_popup.setModal(false); m_popup.setId(m_id); m_popup.setWidth(717); if (m_type.equals(DOWNLOAD)) { m_popup.getFrame().setSize("705px", "640px"); } else if (m_type.equals(HTML)) { m_popup.getFrame().setSize("705px", "640px"); } else if (m_type.equals(LINK)) { m_popup.getFrame().setSize("705px", "640px"); } else if (m_type.equals(TABLE)) { m_popup.getFrame().setSize("705px", "640px"); } else if (m_type.equals(PRINCIPAL)) { m_popup.getFrame().setSize("705px", "320px"); } else { m_popup.getFrame().setSize("705px", "485px"); } m_popup.addDialogClose(new Command() { public void execute() { close(); } }); } else { m_popup.getFrame().setUrl(buildGalleryUrl()); } m_popup.setAutoHideEnabled(true); m_popup.showRelativeTo(m_textBox); if (m_previewHandlerRegistration == null) { m_previewHandlerRegistration = Event.addNativePreviewHandler(new CloseEventPreviewHandler()); } m_xcoordspopup = m_popup.getPopupLeft(); m_ycoordspopup = m_popup.getPopupTop(); } /** * Adds the fader if necessary.<p> * */ private void creatFaider() { if ((m_textBox.getValue().length() * 6.88) > m_textBox.getOffsetWidth()) { m_textBoxContainer.add(m_fadePanel); } } }
Improved dialog positioning.
src-gwt/org/opencms/gwt/client/ui/input/CmsVfsSelection.java
Improved dialog positioning.
<ide><path>rc-gwt/org/opencms/gwt/client/ui/input/CmsVfsSelection.java <ide> /** The handler registration. */ <ide> protected HandlerRegistration m_previewHandlerRegistration; <ide> <del> /** The x-coords of the popup. */ <del> protected int m_xcoordspopup; <del> <del> /** The y-coords of the popup. */ <del> protected int m_ycoordspopup; <del> <ide> /** The default rows set. */ <ide> int m_defaultRows; <ide> <ide> m_openSelection = new OpenButton(iconImage); <ide> <ide> m_textBoxContainer.add(m_openSelection); <del> creatFaider(); <add> createFader(); <ide> initWidget(m_panel); <ide> m_panel.add(m_textBoxContainer); <ide> m_fadePanel.setStyleName(I_CmsInputLayoutBundle.INSTANCE.inputCss().vfsInputBoxFader()); <ide> if (value instanceof String) { <ide> String strValue = (String)value; <ide> m_textBox.setText(strValue); <del> creatFaider(); <add> createFader(); <ide> setTitle(strValue); <ide> } <ide> <ide> m_popup.getFrame().setUrl(buildGalleryUrl()); <ide> } <ide> m_popup.setAutoHideEnabled(true); <del> m_popup.showRelativeTo(m_textBox); <add> m_popup.center(); <ide> if (m_previewHandlerRegistration == null) { <ide> m_previewHandlerRegistration = Event.addNativePreviewHandler(new CloseEventPreviewHandler()); <ide> } <del> <del> m_xcoordspopup = m_popup.getPopupLeft(); <del> m_ycoordspopup = m_popup.getPopupTop(); <del> <ide> } <ide> <ide> /** <ide> * Adds the fader if necessary.<p> <ide> * */ <del> private void creatFaider() { <add> private void createFader() { <ide> <ide> if ((m_textBox.getValue().length() * 6.88) > m_textBox.getOffsetWidth()) { <ide> m_textBoxContainer.add(m_fadePanel);
Java
apache-2.0
16d605a7474ceee83582cc47de00a1eb049bf8a0
0
fengbaicanhe/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,fitermay/intellij-community,kdwink/intellij-community,kool79/intellij-community,petteyg/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,semonte/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,ol-loginov/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,hurricup/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,amith01994/intellij-community,dslomov/intellij-community,blademainer/intellij-community,jexp/idea2,TangHao1987/intellij-community,fitermay/intellij-community,tmpgit/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,jexp/idea2,joewalnes/idea-community,vvv1559/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,salguarnieri/intellij-community,petteyg/intellij-community,fengbaicanhe/intellij-comm
unity,dslomov/intellij-community,apixandru/intellij-community,FHannes/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,ernestp/consulo,alphafoobar/intellij-community,blademainer/intellij-community,caot/intellij-community,izonder/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,holmes/intellij-community,consulo/consulo,akosyakov/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,kool79/intellij-community,fitermay/intellij-community,asedunov/intellij-community,holmes/intellij-community,tmpgit/intellij-community,ibinti/intellij-community,vladmm/intellij-community,da1z/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,hurricup/intellij-community,retomerz/intellij-community,izonder/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,clumsy/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,semonte/intellij-community,clumsy/intellij-community,ivan-fedorov/intellij-community,izonder/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,signed/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,joewalnes/idea-community,tmpgit/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,ernestp/consulo,ahb0327/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,supersven/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,jexp/idea2,vvv1559/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,idea
4bsd/idea4bsd,hurricup/intellij-community,da1z/intellij-community,Lekanich/intellij-community,signed/intellij-community,adedayo/intellij-community,hurricup/intellij-community,ryano144/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,orekyuu/intellij-community,ernestp/consulo,pwoodworth/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,izonder/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,kool79/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,supersven/intellij-community,fnouama/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,dslomov/intellij-community,samthor/intellij-community,clumsy/intellij-community,joewalnes/idea-community,akosyakov/intellij-community,adedayo/intellij-community,diorcety/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,blademainer/intellij-community,fitermay/intellij-community,amith01994/intellij-community,slisson/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,ibinti/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,izonder/intellij-community,petteyg/intellij-community,nicolargo/intellij-community,allotria/intellij-community,apixandru/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,amith01994/intellij-
community,apixandru/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,asedunov/intellij-community,consulo/consulo,kdwink/intellij-community,slisson/intellij-community,gnuhub/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,MichaelNedzelsky/intellij-community,holmes/intellij-community,vladmm/intellij-community,apixandru/intellij-community,kdwink/intellij-community,diorcety/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,consulo/consulo,semonte/intellij-community,ernestp/consulo,salguarnieri/intellij-community,dslomov/intellij-community,clumsy/intellij-community,caot/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,vvv1559/intellij-community,kool79/intellij-community,caot/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,suncycheng/intellij-community,robovm/robovm-studio,nicolargo/intellij-community,ivan-fedorov/intellij-community,ryano144/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,lucafavatella/intellij-community,jexp/idea2,mglukhikh/intellij-community,ryano144/intellij-community,ftomassetti/intellij-community,caot/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,slisson/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,caot/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,orekyuu/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,blademai
ner/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,joewalnes/idea-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,holmes/intellij-community,diorcety/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,kdwink/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,ivan-fedorov/intellij-community,robovm/robovm-studio,akosyakov/intellij-community,vvv1559/intellij-community,caot/intellij-community,amith01994/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,da1z/intellij-community,apixandru/intellij-community,robovm/robovm-studio,apixandru/intellij-community,jagguli/intellij-community,ernestp/consulo,salguarnieri/intellij-community,joewalnes/idea-community,ahb0327/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,kdwink/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,clumsy/intellij-community,kool79/intellij-community,vladmm/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,alphafoobar/in
tellij-community,supersven/intellij-community,robovm/robovm-studio,vladmm/intellij-community,FHannes/intellij-community,caot/intellij-community,ol-loginov/intellij-community,petteyg/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,diorcety/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,kool79/intellij-community,samthor/intellij-community,dslomov/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,wreckJ/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,semonte/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,Lekanich/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,blademainer/intellij-community,allotria/intellij-community,jagguli/intellij-community,signed/intellij-community,kool79/intellij-community,joewalnes/idea-community,hurricup/intellij-community,fitermay/intellij-community,signed/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,retomerz/intellij-community,adedayo/intellij-community,fitermay/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,nicolargo/intellij-community,izonder/intellij-community,kdwink/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,hurricup/intellij-community,clumsy/intellij-community,retomerz/intellij-community,gnuhub/inte
llij-community,izonder/intellij-community,signed/intellij-community,holmes/intellij-community,samthor/intellij-community,wreckJ/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,jexp/idea2,ibinti/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,semonte/intellij-community,Distrotech/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,samthor/intellij-community,jexp/idea2,ryano144/intellij-community,samthor/intellij-community,amith01994/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,samthor/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,ernestp/consulo,FHannes/intellij-community,clumsy/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,Distrotech/intellij-community,blademainer/intellij-community,retomerz/intellij-community,vladmm/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,slisson/intellij-community,clumsy/intellij-community,holmes/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,jagguli/intellij-community,diorcety/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,joewalnes/idea-community,mglukhikh/intellij-community,ThiagoGarciaA
lves/intellij-community,adedayo/intellij-community,robovm/robovm-studio,hurricup/intellij-community,xfournet/intellij-community,slisson/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,signed/intellij-community,caot/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,vladmm/intellij-community,semonte/intellij-community,supersven/intellij-community,izonder/intellij-community,pwoodworth/intellij-community,semonte/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,samthor/intellij-community,fitermay/intellij-community,ivan-fedorov/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,clumsy/intellij-community,orekyuu/intellij-community,supersven/intellij-community,diorcety/intellij-community,allotria/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,signed/intellij-community,semonte/intellij-community,fnouama/intellij-community,samthor/intellij-community,robovm/robovm-studio,supersven/intellij-community,hurricup/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,xfournet/intellij-community,slisson/intellij-community,caot/intellij-community,wreckJ/intellij-community,semonte/intellij-community,da1z/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,ryano144/intellij-community,xfournet/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,Tan
gHao1987/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,consulo/consulo,blademainer/intellij-community,fnouama/intellij-community,adedayo/intellij-community,FHannes/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,amith01994/intellij-community,joewalnes/idea-community,jexp/idea2,consulo/consulo,holmes/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,joewalnes/idea-community,signed/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,asedunov/intellij-community,signed/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,apixandru/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,jexp/idea2,fitermay/intellij-community,youdonghai/intellij-community,izonder/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,da1z/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,blademainer/intellij-community,ThiagoGarciaAlves/intellij-community,supersven/intellij-community,fnouama/intellij-community,asedunov/intellij-community,clumsy/intellij-community,supersven/intellij-community,ahb0327/intellij-community,retomerz/intellij-communit
y,orekyuu/intellij-community,FHannes/intellij-community,gnuhub/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,izonder/intellij-community,petteyg/intellij-community,da1z/intellij-community,consulo/consulo,wreckJ/intellij-community,fnouama/intellij-community,kool79/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,muntasirsyed/intellij-community,FHannes/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,retomerz/intellij-community,jagguli/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,supersven/intellij-community,ahb0327/intellij-community,youdonghai/intellij-community,samthor/intellij-community,signed/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,nicolargo/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,jagguli/intell
ij-community,petteyg/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,FHannes/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,ryano144/intellij-community,petteyg/intellij-community,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,xfournet/intellij-community,semonte/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,youdonghai/intellij-community,kool79/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,caot/intellij-community,ryano144/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,vladmm/intellij-community,allotria/intellij-community,muntasirsyed/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,samthor/intellij-community,clumsy/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,allotria/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,TangHao1987/intellij-community,caot/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community
package com.intellij.ide.fileTemplates.impl;

import com.intellij.ide.fileTemplates.*;
import com.intellij.j2ee.J2EEFileTemplateNames;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ApplicationComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.TabbedPaneWrapper;
import com.intellij.util.ArrayUtil;

import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;

/*
 * @author: MYakovlev
 * Date: Jul 26, 2002
 * Time: 12:44:56 PM
 */

/**
 * Application-level Settings page ("File Templates") that edits four groups of file
 * templates on separate tabs: Templates, Includes, Code and J2EE.
 * <p>
 * The left side of the panel shows the tab pane plus an add/remove/copy/reset toolbar;
 * the right side hosts a single {@link FileTemplateConfigurable} editor that follows
 * the selection of whichever tab is current.
 */
public class AllFileTemplatesConfigurable implements Configurable, ApplicationComponent {
  private static final Logger LOG = Logger.getInstance("#com.intellij.ide.fileTemplates.impl.AllFileTemplatesConfigurable");

  private JPanel myMainPanel;
  // Tab whose selection currently drives the editor; one of the four lists below.
  private FileTemplateTab myCurrentTab;
  private FileTemplateTab myTemplatesList;
  private FileTemplateTab myPatternsList;
  private FileTemplateTab myCodeTemplatesList;
  private FileTemplateTab myJ2eeTemplatesList;
  private JComponent myToolBar;
  private TabbedPaneWrapper myTabbedPane;
  // Shared editor pane on the right; re-targeted whenever the selection changes.
  private FileTemplateConfigurable myEditor;
  // Set whenever a template is added/removed/cloned/reset or the editor reports changes.
  private boolean myModified = false;
  protected JComponent myEditorComponent;

  // Tab indices; NOTE(review): these constants are not referenced in the visible code.
  private final static int TEMPLATE_ID = 0;
  private final static int PATTERN_ID = 1;
  private final static int CODE_ID = 2;
  private final static int J2EE_ID = 3;

  private static final Icon ourIcon = IconLoader.getIcon("/general/fileTemplates.png");
  private FileTemplateTab[] myTabs;

  // Tab titles; also used by isInternalTemplate() to decide which tab a template is on.
  private static final String TEMPLATES_TITLE = "Templates";
  private static final String INCLUDES_TITLE = "Includes";
  private static final String CODE_TITLE = "Code";
  private static final String J2EE_TITLE = "J2EE";

  public void disposeComponent() {
  }

  public void initComponent() {
  }

  public Icon getIcon() {
    return ourIcon;
  }

  public String getComponentName() {
    return "FileTemplateOptions";
  }

  /** Removes the selected template from the current tab and marks the page modified. */
  private void onRemove() {
    myCurrentTab.removeSelected();
    myModified = true;
  }

  /** Creates a new empty template with a default name ("Unnamed") and extension "java". */
  private void onAdd() {
    createTemplate("Unnamed", "java", "");
  }

  /**
   * Adds a new template to the current tab, selects it and focuses the name field.
   * If {@code prefName} is already taken on this tab, a unique name of the form
   * "prefName (N)" is generated.
   *
   * @param prefName  preferred template name
   * @param extension file extension for the new template
   * @param content   initial template text
   * @return the newly created template
   */
  private FileTemplate createTemplate(String prefName, String extension, String content) {
    FileTemplate[] templates = myCurrentTab.getTemplates();
    ArrayList names = new ArrayList(templates.length);
    for (int i = 0; i < templates.length; i++) {
      FileTemplate template = templates[i];
      names.add(template.getName());
    }
    String name = prefName;
    int i = 0;
    // Append " (1)", " (2)", ... until the name is unique on this tab.
    while (names.contains(name)) {
      name = prefName + " (" + (++i) + ")";
    }
    FileTemplate newTemplate = new FileTemplateImpl(content, name, extension);
    myCurrentTab.addTemplate(newTemplate);
    myModified = true;
    myCurrentTab.selectTemplate(newTemplate);
    fireListChanged();
    myEditor.focusToNameField();
    return newTemplate;
  }

  /**
   * Clones the selected template as "Copy [N ]of &lt;name&gt;" (N inserted only when
   * needed for uniqueness) and adds the copy to the current tab. No-op when nothing
   * is selected.
   */
  private void onClone() {
    FileTemplate selected = myCurrentTab.getSelectedTemplate();
    if (selected == null) return;
    final FileTemplate[] templates = myCurrentTab.getTemplates();
    ArrayList names = new ArrayList(templates.length);
    for (int i = 0; i < templates.length; i++) {
      FileTemplate template = templates[i];
      names.add(template.getName());
    }
    String name1String = "Copy ";
    String name2String = "of " + selected.getName();
    String name = name1String + name2String;
    int i = 0;
    // "Copy of X" -> "Copy 1 of X" -> "Copy 2 of X" ... until unique.
    while (names.contains(name)) {
      name = name1String + (++i) + " " + name2String;
    }
    FileTemplate newTemplate = new FileTemplateImpl(selected.getText(), name, selected.getExtension());
    myCurrentTab.addTemplate(newTemplate);
    myModified = true;
    myCurrentTab.selectTemplate(newTemplate);
    fireListChanged();
  }

  public String getDisplayName() {
    return "File Templates";
  }

  /** Help topic depends on which tab is currently shown. */
  public String getHelpTopic() {
    int index = myTabbedPane.getSelectedIndex();
    switch (index) {
      case 0:
        return "fileTemplates.templates";
      case 1:
        return "fileTemplates.includes";
      case 2:
        return "fileTemplates.code";
      case 3:
        return "fileTemplates.j2ee";
      default:
        throw new IllegalStateException("wrong index: " + index);
    }
  }

  /**
   * Builds the whole settings UI: the four tabs, the add/remove/copy/reset toolbar,
   * and the template editor on the right. Layout is done manually in
   * {@link #doMainPanelLayout()} (the GridBagLayout constraints below are overridden
   * by the custom {@code doLayout()}).
   */
  public JComponent createComponent() {
    myTemplatesList = new FileTemplateTabAsList(TEMPLATES_TITLE) {
      public void onTemplateSelected() {
        onListSelectionChanged();
      }
    };
    myPatternsList = new FileTemplateTabAsList(INCLUDES_TITLE) {
      public void onTemplateSelected() {
        onListSelectionChanged();
      }
    };
    myCodeTemplatesList = new FileTemplateTabAsList(CODE_TITLE) {
      public void onTemplateSelected() {
        onListSelectionChanged();
      }
    };
    myCurrentTab = myTemplatesList;
    // The J2EE tab is a tree grouped by module type (EJB / Application / Web),
    // plus any groups contributed by FileTemplateGroupDescriptorFactory components.
    myJ2eeTemplatesList = new FileTemplateTabAsTree(J2EE_TITLE) {
      public void onTemplateSelected() {
        onListSelectionChanged();
      }

      protected FileTemplateTabAsTree.TreeNode initModel() {
        ArrayList<TreeNode> categories = new ArrayList<TreeNode>();
        categories.add(new TreeNode("EJB", ModuleType.EJB.getNodeIcon(true), new TreeNode[]{
          new TreeNode("Java code templates", StdFileTypes.JAVA.getIcon(), new TreeNode[]{
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.ENTITY_CLASS_BMP_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.ENTITY_CLASS_CMP_1x_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.ENTITY_CLASS_CMP_2x_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.ENTITY_HOME_INTERFACE_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.ENTITY_LOCAL_HOME_INTERFACE_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.LOCAL_INTERFACE_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.REMOTE_INTERFACE_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.SESSION_CLASS_STATEFUL_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.SESSION_CLASS_STATELESS_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.SESSION_HOME_INTERFACE_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.SESSION_LOCAL_HOME_INTERFACE_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.MESSAGE_CLASS_TEMPLATE),
          }),
          new TreeNode("Deployment descriptors", StdFileTypes.XML.getIcon(), new TreeNode[]{
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.EJB_JAR_XML_1_1),
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.EJB_JAR_XML_2_0),
          }),
        }));
        categories.add(new TreeNode("Application", ModuleType.J2EE_APPLICATION.getNodeIcon(true), new TreeNode[]{
          new TreeNode("Deployment descriptors", StdFileTypes.XML.getIcon(), new TreeNode[]{
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.APPLICATION_XML_1_2),
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.APPLICATION_XML_1_3),
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.APPLICATION_XML_1_4),
          }),
        }));
        categories.add(new TreeNode("Web", ModuleType.WEB.getNodeIcon(true), new TreeNode[]{
          new TreeNode("Java code templates", StdFileTypes.JAVA.getIcon(), new TreeNode[]{
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.SERVLET_CLASS_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.FILTER_CLASS_TEMPLATE),
          }),
          new TreeNode("Deployment descriptors", StdFileTypes.XML.getIcon(), new TreeNode[]{
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.WEB_XML_22),
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.WEB_XML_23),
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.WEB_XML_24),
          }),
          new TreeNode("Jsp files", StdFileTypes.JSP.getIcon(), new TreeNode[]{
            new TreeNode(StdFileTypes.JSP.getIcon(), J2EEFileTemplateNames.JSP_FILE),
            new TreeNode(StdFileTypes.JSPX.getIcon(), J2EEFileTemplateNames.JSPX_FILE)
          }),
        }));
        // Let plugins/components contribute additional template groups.
        FileTemplateGroupDescriptorFactory[] templateGroupFactories =
          (FileTemplateGroupDescriptorFactory[])ApplicationManager.getApplication().getComponents(FileTemplateGroupDescriptorFactory.class);
        for (int i = 0; i < templateGroupFactories.length; i++) {
          FileTemplateGroupDescriptor fileTemplatesDescriptor = templateGroupFactories[i].getFileTemplatesDescriptor();
          if (fileTemplatesDescriptor != null) {
            categories.add(createNode(fileTemplatesDescriptor));
          }
        }
        return new TreeNode("ROOT", null, categories.toArray(new TreeNode[categories.size()]));
      }
    };
    myTabs = new FileTemplateTab[]{myTemplatesList, myPatternsList, myCodeTemplatesList, myJ2eeTemplatesList};
    myTabbedPane = new TabbedPaneWrapper();
    myTabbedPane.installKeyboardNavigation();
    myTabbedPane.setTabLayoutPolicy(JTabbedPane.SCROLL_TAB_LAYOUT);
    for (int i = 0; i < myTabs.length; i++) {
      FileTemplateTab tab = myTabs[i];
      myTabbedPane.addTab(tab.getTitle(), new JScrollPane(tab.getComponent()));
    }
    myTabbedPane.addChangeListener(new ChangeListener() {
      public void stateChanged(ChangeEvent e) {
        onTabChanged();
      }
    });

    DefaultActionGroup group = new DefaultActionGroup();
    // Remove: disabled for internal (non-deletable) templates.
    AnAction removeAction = new AnAction("Remove Template", null, IconLoader.getIcon("/general/remove.png")) {
      public void actionPerformed(AnActionEvent e) {
        onRemove();
      }

      public void update(AnActionEvent e) {
        super.update(e);
        FileTemplate selectedItem = myCurrentTab.getSelectedTemplate();
        e.getPresentation().setEnabled(selectedItem != null && !isInternalTemplate(selectedItem.getName(), myCurrentTab.getTitle()));
      }
    };
    // Add: templates cannot be created on the Code and J2EE tabs.
    AnAction addAction = new AnAction("Create Template", null, IconLoader.getIcon("/general/add.png")) {
      public void actionPerformed(AnActionEvent e) {
        onAdd();
      }

      public void update(AnActionEvent e) {
        super.update(e);
        e.getPresentation().setEnabled(!(myCurrentTab == myCodeTemplatesList || myCurrentTab == myJ2eeTemplatesList));
      }
    };
    // Copy: same restriction as Add, plus a template must be selected.
    AnAction cloneAction = new AnAction("Copy Template", null, IconLoader.getIcon("/actions/copy.png")) {
      public void actionPerformed(AnActionEvent e) {
        onClone();
      }

      public void update(AnActionEvent e) {
        super.update(e);
        e.getPresentation().setEnabled(myCurrentTab != myCodeTemplatesList &&
                                       myCurrentTab != myJ2eeTemplatesList &&
                                       myCurrentTab.getSelectedTemplate() != null);
      }
    };
    // Reset: only for modified templates that still have a bundled default to revert to.
    AnAction resetAction = new AnAction("Reset To Default", null, IconLoader.getIcon("/actions/reset.png")) {
      public void actionPerformed(AnActionEvent e) {
        onReset();
      }

      public void update(AnActionEvent e) {
        super.update(e);
        FileTemplate selectedItem = myCurrentTab.getSelectedTemplate();
        FileTemplateManager manager = FileTemplateManager.getInstance();
        e.getPresentation().setEnabled(selectedItem != null &&
                                       !selectedItem.isDefault() &&
                                       manager.getDefaultTemplate(selectedItem.getName(), selectedItem.getExtension()) != null);
      }
    };
    group.add(addAction);
    group.add(removeAction);
    group.add(cloneAction);
    group.add(resetAction);
    addAction.registerCustomShortcutSet(CommonShortcuts.INSERT, myCurrentTab.getComponent());
    removeAction.registerCustomShortcutSet(CommonShortcuts.DELETE, myCurrentTab.getComponent());
    myToolBar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, group, true).getComponent();

    myEditor = new FileTemplateConfigurable();
    myEditor.addChangeListener(new ChangeListener() {
      public void stateChanged(ChangeEvent e) {
        onEditorChanged();
      }
    });
    myMainPanel = new JPanel(new GridBagLayout()) {
      public void doLayout() {
        doMainPanelLayout();
      }
    };
    // Layout manager is ignored
    myMainPanel.add(myToolBar,
                    new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL,
                                           new Insets(2, 2, 2, 2), 0, 0));
    myMainPanel.add(myTabbedPane.getComponent(),
                    new GridBagConstraints(0, 1, 1, 1, 0.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH,
                                           new Insets(2, 2, 2, 2), 0, 0));
    myEditorComponent = myEditor.createComponent();
    myMainPanel.add(myEditorComponent,
                    new GridBagConstraints(1, 0, 1, 2, 1.0, 1.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH,
                                           new Insets(2, 2, 2, 2), 0, 0));
    myMainPanel.setMinimumSize(new Dimension(400, 300));
    myMainPanel.setPreferredSize(new Dimension(700, 500));
    return myMainPanel;
  }

  /**
   * Recursively converts a {@link FileTemplateDescriptor} (possibly a group) into the
   * tree-node representation used by the J2EE tab.
   */
  private FileTemplateTabAsTree.TreeNode createNode(FileTemplateDescriptor descriptor) {
    if (descriptor instanceof FileTemplateGroupDescriptor) {
      FileTemplateDescriptor[] children = ((FileTemplateGroupDescriptor)descriptor).getTemplates();
      FileTemplateTabAsTree.TreeNode[] nodes = new FileTemplateTabAsTree.TreeNode[children.length];
      for (int i = 0; i < nodes.length; i++) {
        nodes[i] = createNode(children[i]);
      }
      return new FileTemplateTabAsTree.TreeNode(((FileTemplateGroupDescriptor)descriptor).getTitle(), descriptor.getIcon(), nodes);
    }
    return new FileTemplateTabAsTree.TreeNode(descriptor.getIcon(), descriptor.getFileName());
  }

  /**
   * Reverts the selected template to its bundled default after user confirmation,
   * then refreshes the editor.
   */
  private void onReset() {
    FileTemplate selected = myCurrentTab.getSelectedTemplate();
    if (selected != null) {
      if (Messages.showOkCancelDialog("Reset to original template?", "Reset Template", Messages.getQuestionIcon()) !=
          DialogWrapper.OK_EXIT_CODE) {
        return;
      }
      FileTemplateImpl template = (FileTemplateImpl)selected;
      template.resetToDefault();
      myEditor.reset();
      myModified = true;
    }
  }

  private void onEditorChanged() {
    fireListChanged();
  }

  /** Keeps {@code myCurrentTab} in sync with the tabbed pane and refreshes the editor. */
  private void onTabChanged() {
    int selectedIndex = myTabbedPane.getSelectedIndex();
    if (0 <= selectedIndex && selectedIndex < myTabs.length) {
      myCurrentTab = myTabs[selectedIndex];
    }
    onListSelectionChanged();
  }

  /**
   * Reacts to a selection change on the current tab: commits pending edits from the
   * shared editor (logging a ConfigurationException instead of surfacing it), then
   * re-targets the editor at the new selection (or clears it when nothing is selected).
   */
  private void onListSelectionChanged() {
    FileTemplate selectedValue = myCurrentTab.getSelectedTemplate();
    FileTemplate prevTemplate = myEditor == null ? null : myEditor.getTemplate();
    if (prevTemplate != selectedValue) {
      //selection has changed
      if (myEditor.isModified()) {
        try {
          myModified = true;
          myEditor.apply();
          fireListChanged();
        }
        catch (ConfigurationException e) {
          LOG.error(e);
        }
      }
      if (selectedValue == null) {
        myEditor.setTemplate(null, FileTemplateManager.getInstance().getDefaultTemplateDescription());
      }
      else {
        selectTemplate(selectedValue);
      }
    }
  }

  /**
   * Points the shared editor at {@code template}, choosing the description file that
   * matches the current tab (templates vs. includes) and configuring the editor's
   * internal-template banner and "adjust" checkbox visibility.
   */
  private void selectTemplate(FileTemplate template) {
    VirtualFile defDesc = null;
    if (myCurrentTab == myTemplatesList) {
      defDesc = FileTemplateManager.getInstance().getDefaultTemplateDescription();
    }
    else if (myCurrentTab == myPatternsList) {
      defDesc = FileTemplateManager.getInstance().getDefaultIncludeDescription();
    }
    if (myEditor.getTemplate() != template) {
      myEditor.setTemplate(template, defDesc);
      final boolean isInternal = isInternalTemplate(template.getName(), myCurrentTab.getTitle());
      myEditor.setShowInternalMessage(isInternal ? " " : null);
      myEditor.setShowAdjustCheckBox(myTemplatesList == myCurrentTab);
    }
  }

  // internal template could not be removed and should be rendered bold
  public static boolean isInternalTemplate(String templateName, String templateTabTitle) {
    if (templateName == null) return false;
    if (Comparing.strEqual(templateTabTitle, TEMPLATES_TITLE)) {
      // On the Templates tab only the four built-in class-kind templates are internal.
      return Comparing.strEqual(templateName, FileTemplateManager.INTERNAL_CLASS_TEMPLATE_NAME) ||
             Comparing.strEqual(templateName, FileTemplateManager.INTERNAL_INTERFACE_TEMPLATE_NAME) ||
             Comparing.strEqual(templateName, FileTemplateManager.INTERNAL_ENUM_TEMPLATE_NAME) ||
             Comparing.strEqual(templateName, FileTemplateManager.INTERNAL_ANNOTATION_TYPE_TEMPLATE_NAME);
    }
    if (Comparing.strEqual(templateTabTitle, CODE_TITLE)) {
      // Everything on the Code tab is internal.
      return true;
    }
    if (Comparing.strEqual(templateTabTitle, J2EE_TITLE)) {
      // Everything on the J2EE tab is internal.
      return true;
    }
    if (Comparing.strEqual(templateTabTitle, INCLUDES_TITLE)) {
      return Comparing.strEqual(templateName, FileTemplateManager.FILE_HEADER_TEMPLATE_NAME);
    }
    return false;
  }

  /**
   * Manual layout: toolbar on top-left, tab pane below it (width clamped between
   * min(preferred, panel/5) and 300, and at least as wide as the toolbar), and the
   * editor filling the remaining space on the right.
   */
  private void doMainPanelLayout() {
    Dimension toolbarPreferredSize = myToolBar.getPreferredSize();
    Dimension mainPanelSize = myMainPanel.getSize();
    Dimension scrollPanePreferedSize = myTabbedPane.getComponent().getPreferredSize();
    if (mainPanelSize.width < 1 || mainPanelSize.height < 1) {
      return;
    }
    int leftWidth = scrollPanePreferedSize.width;
    leftWidth = Math.min(leftWidth, mainPanelSize.width / 5);
    leftWidth = Math.max(leftWidth, 300); //to prevent tabs from scrolling
    //todo[myakovlev] Calculate tabs preferred size
    leftWidth = Math.max(leftWidth, toolbarPreferredSize.width);
    int x = 2;
    int y = 2;
    int width = toolbarPreferredSize.width;
    int height = toolbarPreferredSize.height;
    myToolBar.setBounds(x, y, width, height);
    y += height + 2;
    width = leftWidth + 2;
    height = Math.max(1, mainPanelSize.height - 2 - y);
    myTabbedPane.getComponent().setBounds(x, y, width, height);
    x += width + 4;
    y = 2;
    width = Math.max(1, mainPanelSize.width - 2 - x);
    height = Math.max(1, mainPanelSize.height - 2 - y);
    myEditorComponent.setBounds(x, y, width, height);
    myEditorComponent.revalidate();
  }

  /**
   * Populates all four tabs from {@link FileTemplateManager}; the Templates tab shows
   * internal templates merged ahead of user templates.
   */
  private void initLists() {
    FileTemplateManager templateManager = FileTemplateManager.getInstance();
    FileTemplate[] templates = templateManager.getAllTemplates();
    FileTemplate[] internals = templateManager.getInternalTemplates();
    FileTemplate[] templatesAndInternals = ArrayUtil.mergeArrays(internals, templates, FileTemplate.class);
    myTemplatesList.init(templatesAndInternals);
    myPatternsList.init(templateManager.getAllPatterns());
    myCodeTemplatesList.init(templateManager.getAllCodeTemplates());
    myJ2eeTemplatesList.init(templateManager.getAllJ2eeTemplates());
  }

  public boolean isModified() {
    return myModified || myEditor.isModified();
  }

  /**
   * If apply is acceptable, returns true. If no, returns false and fills error string.
*/ public boolean canApply(final boolean showErrorDialog, String[] errorString) { for (int i = 0; i < myTabs.length; i++) { FileTemplateTab list = myTabs[i]; if (!canApply(showErrorDialog, errorString, list)) return false; } return true; } public boolean canApply(final boolean showErrorDialog, String[] errorString, FileTemplateTab list) { final FileTemplate[] templates = myCurrentTab.getTemplates(); ArrayList allNames = new ArrayList(); FileTemplate itemWithError = null; String errorMessage = null; String errorTitle = null; boolean errorInName = true; for (int i = 0; i < templates.length; i++) { FileTemplate template = templates[i]; boolean isClassTemplate = Comparing.strEqual(template.getName(), FileTemplateManager.INTERNAL_CLASS_TEMPLATE_NAME); boolean isInterfaceTemplate = Comparing.strEqual(template.getName(), FileTemplateManager.INTERNAL_INTERFACE_TEMPLATE_NAME); if (isClassTemplate || isInterfaceTemplate) continue; String currName = template.getName(); String currExt = template.getExtension(); if (currName.length() == 0) { itemWithError = template; errorMessage = "Please specify a name for this template"; errorTitle = "Template Name Not Specified"; errorString[0] = "Please specify template name"; break; } if (allNames.contains(currName)) { itemWithError = template; errorMessage = "Please specify a different name for this template"; errorTitle = "Template already exists"; errorString[0] = "Template with such name already exists. 
Please specify a different template name"; break; } if (currExt.length() == 0) { itemWithError = template; errorMessage = "Please specify an extension for this template"; errorTitle = "Template Extension Not Specified"; errorString[0] = "Please specify template extension"; errorInName = false; break; } allNames.add(currName); } if (itemWithError == null) { return true; } else { final String _errorString = errorMessage; final String _errorTitle = errorTitle; final boolean _errorInName = errorInName; myTabbedPane.setSelectedIndex(Arrays.asList(myTabs).indexOf(list)); selectTemplate(itemWithError); list.selectTemplate(itemWithError); ApplicationManager.getApplication().invokeLater(new Runnable() { public void run() { if (showErrorDialog) { Messages.showMessageDialog(myMainPanel, _errorString, _errorTitle, Messages.getErrorIcon()); } if (_errorInName) { myEditor.focusToNameField(); } else { myEditor.focusToExtensionField(); } } }); return false; } } private void fireListChanged() { myCurrentTab.fireDataChanged(); if (myMainPanel != null) { myMainPanel.revalidate(); } } public void apply() throws ConfigurationException { if (myEditor != null && myEditor.isModified()) { myModified = true; myEditor.apply(); } String[] errorString = new String[1]; if (!canApply(false, errorString)) { throw new ConfigurationException(errorString[0]); } // Apply templates ArrayList newModifiedItems = new ArrayList(); FileTemplate[] templates = myTemplatesList.getTemplates(); for (int i = 0; i < templates.length; i++) { FileTemplate template = templates[i]; newModifiedItems.add(template); } FileTemplateManager templatesManager = FileTemplateManager.getInstance(); apply(newModifiedItems, myTemplatesList.savedTemplates, TEMPLATE_ID, templatesManager.getAllTemplates()); // Apply patterns newModifiedItems = new ArrayList(); templates = myPatternsList.getTemplates(); for (int i = 0; i < templates.length; i++) { FileTemplate template = templates[i]; newModifiedItems.add(template); } 
apply(newModifiedItems, myPatternsList.savedTemplates, PATTERN_ID, templatesManager.getAllPatterns()); //Apply code templates newModifiedItems = new ArrayList(); templates = myCodeTemplatesList.getTemplates(); for (int i = 0; i < templates.length; i++) { FileTemplate template = templates[i]; newModifiedItems.add(template); } apply(newModifiedItems, myCodeTemplatesList.savedTemplates, CODE_ID, templatesManager.getAllCodeTemplates()); //Apply J2EE templates newModifiedItems = new ArrayList(); templates = myJ2eeTemplatesList.getTemplates(); for (int i = 0; i < templates.length; i++) { FileTemplate template = templates[i]; newModifiedItems.add(template); } apply(newModifiedItems, myJ2eeTemplatesList.savedTemplates, J2EE_ID, templatesManager.getAllJ2eeTemplates()); FileTemplateManager.getInstance().saveAll(); if (myEditor != null) { myModified = false; fireListChanged(); reset(); } } private static void removeTemplate(FileTemplate aTemplate, int listId, boolean fromDiskOnly) { FileTemplateManager manager = FileTemplateManager.getInstance(); if (listId == AllFileTemplatesConfigurable.TEMPLATE_ID) { if (!aTemplate.isInternal()) { manager.removeTemplate(aTemplate, fromDiskOnly); } else { manager.removeInternal(aTemplate); } } else if (listId == PATTERN_ID) { manager.removePattern(aTemplate, fromDiskOnly); } else if (listId == CODE_ID) { manager.removeCodeTemplate(aTemplate, fromDiskOnly); } else if (listId == J2EE_ID) { manager.removeJ2eeTemplate(aTemplate, fromDiskOnly); } } private static void apply(ArrayList newModifiedItems, Map savedTemplate2ModifiedTemplate, int listId, FileTemplate[] templates) { FileTemplateManager templatesManager = FileTemplateManager.getInstance(); if (listId == TEMPLATE_ID) { FileTemplate[] internals = templatesManager.getInternalTemplates(); templates = ArrayUtil.mergeArrays(internals, templates, FileTemplate.class); } ArrayList savedTemplates = new ArrayList(); // Delete removed and fill savedTemplates for (int i = 0; i < templates.length; 
i++) { FileTemplate aTemplate = templates[i]; FileTemplate aModifiedTemplate = (FileTemplate)savedTemplate2ModifiedTemplate.get(aTemplate); if (newModifiedItems.contains(aModifiedTemplate)) { savedTemplates.add(aTemplate); } else { removeTemplate(aTemplate, listId, false); savedTemplate2ModifiedTemplate.remove(aTemplate); } } // Now all removed templates deleted from table, savedTemplates contains all templates in table for (Iterator iterator = savedTemplates.iterator(); iterator.hasNext();) { FileTemplate aTemplate = (FileTemplate)iterator.next(); FileTemplate aModifiedTemplate = (FileTemplate)savedTemplate2ModifiedTemplate.get(aTemplate); LOG.assertTrue(aModifiedTemplate != null); aTemplate.setAdjust(aModifiedTemplate.isAdjust()); if (!aModifiedTemplate.isDefault()) { FileTemplateUtil.copyTemplate(aModifiedTemplate, aTemplate); } else { if (!aTemplate.isDefault()) { removeTemplate(aTemplate, listId, true); } } } // Add new templates to table for (Iterator iterator = newModifiedItems.iterator(); iterator.hasNext();) { FileTemplate aModifiedTemplate = (FileTemplate)iterator.next(); LOG.assertTrue(aModifiedTemplate != null); if (!savedTemplate2ModifiedTemplate.containsValue(aModifiedTemplate)) { if (listId == AllFileTemplatesConfigurable.TEMPLATE_ID) { templatesManager.addTemplate(aModifiedTemplate.getName(), aModifiedTemplate.getExtension()).setText(aModifiedTemplate.getText()); } else if (listId == AllFileTemplatesConfigurable.PATTERN_ID) { templatesManager.addPattern(aModifiedTemplate.getName(), aModifiedTemplate.getExtension()).setText(aModifiedTemplate.getText()); } else if (listId == CODE_ID) { templatesManager.addCodeTemplate(aModifiedTemplate.getName(), aModifiedTemplate.getExtension()).setText(aModifiedTemplate.getText()); } else if (listId == J2EE_ID) { templatesManager.addJ2eeTemplate(aModifiedTemplate.getName(), aModifiedTemplate.getExtension()).setText(aModifiedTemplate.getText()); } } } } public void reset() { myEditor.reset(); initLists(); myModified 
= false; } public void disposeUIResources() { if (myEditor != null) { myEditor.disposeUIResources(); myEditor = null; myEditorComponent = null; } if (myTabbedPane != null) { myTabbedPane.uninstallKeyboardNavigation(); } myMainPanel = null; } public JComponent getPreferredFocusedComponent() { return myCurrentTab.getComponent(); } public void createNewTemplate(String preferredName, String extension, String text) { createTemplate(preferredName, extension, text); } }
source/com/intellij/ide/fileTemplates/impl/AllFileTemplatesConfigurable.java
package com.intellij.ide.fileTemplates.impl;

import com.intellij.ide.fileTemplates.*;
import com.intellij.j2ee.J2EEFileTemplateNames;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ApplicationComponent;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.TabbedPaneWrapper;
import com.intellij.util.ArrayUtil;

import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;

/*
 * @author: MYakovlev
 * Date: Jul 26, 2002
 * Time: 12:44:56 PM
 */
// Settings page ("File Templates") that edits four groups of file templates:
// Templates, Includes, Code and J2EE. Changes are buffered in the tabs and
// committed to FileTemplateManager in apply().
public class AllFileTemplatesConfigurable implements Configurable, ApplicationComponent {
  private static final Logger LOG = Logger.getInstance("#com.intellij.ide.fileTemplates.impl.AllFileTemplatesConfigurable");
  private JPanel myMainPanel;
  private FileTemplateTab myCurrentTab;        // tab currently shown in the tabbed pane
  private FileTemplateTab myTemplatesList;
  private FileTemplateTab myPatternsList;      // the "Includes" tab
  private FileTemplateTab myCodeTemplatesList;
  private FileTemplateTab myJ2eeTemplatesList;
  private JComponent myToolBar;
  private TabbedPaneWrapper myTabbedPane;
  private FileTemplateConfigurable myEditor;   // right-hand template editor pane
  private boolean myModified = false;
  protected JComponent myEditorComponent;
  // Ids used to route add/remove/apply calls to the matching manager methods.
  private final static int TEMPLATE_ID = 0;
  private final static int PATTERN_ID = 1;
  private final static int CODE_ID = 2;
  private final static int J2EE_ID = 3;
  private static final Icon ourIcon = IconLoader.getIcon("/general/fileTemplates.png");
  private FileTemplateTab[] myTabs;
  private static final String TEMPLATES_TITLE = "Templates";
  private static final String INCLUDES_TITLE = "Includes";
  private static final String CODE_TITLE = "Code";
  private static final String J2EE_TITLE = "J2EE";

  public void disposeComponent() {
  }

  public void initComponent() {
  }

  public Icon getIcon() {
    return ourIcon;
  }

  public String getComponentName() {
    return "FileTemplateOptions";
  }

  // Removes the selected template from the current tab (committed on apply()).
  private void onRemove() {
    myCurrentTab.removeSelected();
    myModified = true;
  }

  private void onAdd() {
    createTemplate("Unnamed", "java", "");
  }

  // Adds a new template to the current tab, uniquifying the name with a
  // " (n)" suffix, selects it and focuses the name field for renaming.
  private FileTemplate createTemplate(String prefName, String extension, String content) {
    FileTemplate[] templates = myCurrentTab.getTemplates();
    ArrayList names = new ArrayList(templates.length);
    for (int i = 0; i < templates.length; i++) {
      FileTemplate template = templates[i];
      names.add(template.getName());
    }
    String name = prefName;
    int i = 0;
    while (names.contains(name)) {
      name = prefName + " (" + (++i) + ")";
    }
    FileTemplate newTemplate = new FileTemplateImpl(content, name, extension);
    myCurrentTab.addTemplate(newTemplate);
    myModified = true;
    myCurrentTab.selectTemplate(newTemplate);
    fireListChanged();
    myEditor.focusToNameField();
    return newTemplate;
  }

  // Duplicates the selected template under a unique "Copy [n] of <name>" name.
  private void onClone() {
    FileTemplate selected = myCurrentTab.getSelectedTemplate();
    if (selected == null) return;
    final FileTemplate[] templates = myCurrentTab.getTemplates();
    ArrayList names = new ArrayList(templates.length);
    for (int i = 0; i < templates.length; i++) {
      FileTemplate template = templates[i];
      names.add(template.getName());
    }
    String name1String = "Copy ";
    String name2String = "of " + selected.getName();
    String name = name1String + name2String;
    int i = 0;
    while (names.contains(name)) {
      name = name1String + (++i) + " " + name2String;
    }
    FileTemplate newTemplate = new FileTemplateImpl(selected.getText(), name, selected.getExtension());
    myCurrentTab.addTemplate(newTemplate);
    myModified = true;
    myCurrentTab.selectTemplate(newTemplate);
    fireListChanged();
  }

  public String getDisplayName() {
    return "File Templates";
  }

  // Help id depends on which tab is visible; indices follow myTabs order.
  public String getHelpTopic() {
    int index = myTabbedPane.getSelectedIndex();
    switch (index) {
      case 0:
        return "fileTemplates.templates";
      case 1:
        return "fileTemplates.includes";
      case 2:
        return "fileTemplates.code";
      case 3:
        return "fileTemplates.j2ee";
      default:
        throw new IllegalStateException("wrong index: " + index);
    }
  }

  // Builds the whole settings UI: four tabs, the action toolbar
  // (add/remove/copy/reset) and the template editor; layout is done manually
  // in doMainPanelLayout() via the overridden doLayout().
  public JComponent createComponent() {
    myTemplatesList = new FileTemplateTabAsList(TEMPLATES_TITLE) {
      public void onTemplateSelected() {
        onListSelectionChanged();
      }
    };
    myPatternsList = new FileTemplateTabAsList(INCLUDES_TITLE) {
      public void onTemplateSelected() {
        onListSelectionChanged();
      }
    };
    myCodeTemplatesList = new FileTemplateTabAsList(CODE_TITLE) {
      public void onTemplateSelected() {
        onListSelectionChanged();
      }
    };
    myCurrentTab = myTemplatesList;
    myJ2eeTemplatesList = new FileTemplateTabAsTree(J2EE_TITLE) {
      public void onTemplateSelected() {
        onListSelectionChanged();
      }
      // Static tree of J2EE template categories plus any groups contributed by
      // FileTemplateGroupDescriptorFactory components.
      protected FileTemplateTabAsTree.TreeNode initModel() {
        ArrayList<TreeNode> categories = new ArrayList<TreeNode>();
        categories.add(new TreeNode("EJB", ModuleType.EJB.getNodeIcon(true), new TreeNode[]{
          new TreeNode("Java code templates", StdFileTypes.JAVA.getIcon(), new TreeNode[]{
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.ENTITY_CLASS_BMP_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.ENTITY_CLASS_CMP_1x_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.ENTITY_CLASS_CMP_2x_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.ENTITY_HOME_INTERFACE_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.ENTITY_LOCAL_HOME_INTERFACE_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.LOCAL_INTERFACE_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.REMOTE_INTERFACE_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.SESSION_CLASS_STATEFUL_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.SESSION_CLASS_STATELESS_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.SESSION_HOME_INTERFACE_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.SESSION_LOCAL_HOME_INTERFACE_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.MESSAGE_CLASS_TEMPLATE),
          }),
          new TreeNode("Deployment descriptors", StdFileTypes.XML.getIcon(), new TreeNode[]{
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.EJB_JAR_XML_1_1),
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.EJB_JAR_XML_2_0),
          }),
        }));
        categories.add(new TreeNode("Application", ModuleType.J2EE_APPLICATION.getNodeIcon(true), new TreeNode[]{
          new TreeNode("Deployment descriptors", StdFileTypes.XML.getIcon(), new TreeNode[]{
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.APPLICATION_XML_1_2),
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.APPLICATION_XML_1_3),
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.APPLICATION_XML_1_4),
          }),
        }));
        categories.add(new TreeNode("Web", ModuleType.WEB.getNodeIcon(true), new TreeNode[]{
          new TreeNode("Java code templates", StdFileTypes.JAVA.getIcon(), new TreeNode[]{
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.SERVLET_CLASS_TEMPLATE),
            new TreeNode(StdFileTypes.JAVA.getIcon(), J2EEFileTemplateNames.FILTER_CLASS_TEMPLATE),
          }),
          new TreeNode("Deployment descriptors", StdFileTypes.XML.getIcon(), new TreeNode[]{
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.WEB_XML_22),
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.WEB_XML_23),
            new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.WEB_XML_24),
          }),
          new TreeNode("Jsp files", StdFileTypes.JSP.getIcon(), new TreeNode[]{
            new TreeNode(StdFileTypes.JSP.getIcon(), J2EEFileTemplateNames.JSP_FILE)
          }),
        }));
        FileTemplateGroupDescriptorFactory[] templateGroupFactories = (FileTemplateGroupDescriptorFactory[])ApplicationManager.getApplication().getComponents(FileTemplateGroupDescriptorFactory.class);
        for (int i = 0; i < templateGroupFactories.length; i++) {
          FileTemplateGroupDescriptor fileTemplatesDescriptor = templateGroupFactories[i].getFileTemplatesDescriptor();
          if (fileTemplatesDescriptor != null) {
            categories.add(createNode(fileTemplatesDescriptor));
          }
        }
        return new TreeNode("ROOT", null, categories.toArray(new TreeNode[categories.size()]));
      }
    };
    myTabs = new FileTemplateTab[]{myTemplatesList, myPatternsList, myCodeTemplatesList, myJ2eeTemplatesList};
    myTabbedPane = new TabbedPaneWrapper();
    myTabbedPane.installKeyboardNavigation();
    myTabbedPane.setTabLayoutPolicy(JTabbedPane.SCROLL_TAB_LAYOUT);
    for (int i = 0; i < myTabs.length; i++) {
      FileTemplateTab tab = myTabs[i];
      myTabbedPane.addTab(tab.getTitle(), new JScrollPane(tab.getComponent()));
    }
    myTabbedPane.addChangeListener(new ChangeListener() {
      public void stateChanged(ChangeEvent e) {
        onTabChanged();
      }
    });
    DefaultActionGroup group = new DefaultActionGroup();
    AnAction removeAction = new AnAction("Remove Template", null, IconLoader.getIcon("/general/remove.png")) {
      public void actionPerformed(AnActionEvent e) {
        onRemove();
      }
      public void update(AnActionEvent e) {
        super.update(e);
        FileTemplate selectedItem = myCurrentTab.getSelectedTemplate();
        // Internal templates may not be removed.
        e.getPresentation().setEnabled(selectedItem != null && !isInternalTemplate(selectedItem.getName(), myCurrentTab.getTitle()));
      }
    };
    AnAction addAction = new AnAction("Create Template", null, IconLoader.getIcon("/general/add.png")) {
      public void actionPerformed(AnActionEvent e) {
        onAdd();
      }
      public void update(AnActionEvent e) {
        super.update(e);
        // New templates can only be created in the Templates and Includes tabs.
        e.getPresentation().setEnabled(!(myCurrentTab == myCodeTemplatesList || myCurrentTab == myJ2eeTemplatesList));
      }
    };
    AnAction cloneAction = new AnAction("Copy Template", null, IconLoader.getIcon("/actions/copy.png")) {
      public void actionPerformed(AnActionEvent e) {
        onClone();
      }
      public void update(AnActionEvent e) {
        super.update(e);
        e.getPresentation().setEnabled(myCurrentTab != myCodeTemplatesList &&
                                       myCurrentTab != myJ2eeTemplatesList &&
                                       myCurrentTab.getSelectedTemplate() != null);
      }
    };
    AnAction resetAction = new AnAction("Reset To Default", null, IconLoader.getIcon("/actions/reset.png")) {
      public void actionPerformed(AnActionEvent e) {
        onReset();
      }
      public void update(AnActionEvent e) {
        super.update(e);
        FileTemplate selectedItem = myCurrentTab.getSelectedTemplate();
        FileTemplateManager manager = FileTemplateManager.getInstance();
        // Enabled only for customized templates that still have a bundled default.
        e.getPresentation().setEnabled(selectedItem != null &&
                                       !selectedItem.isDefault() &&
                                       manager.getDefaultTemplate(selectedItem.getName(), selectedItem.getExtension()) != null);
      }
    };
    group.add(addAction);
    group.add(removeAction);
    group.add(cloneAction);
    group.add(resetAction);
    addAction.registerCustomShortcutSet(CommonShortcuts.INSERT, myCurrentTab.getComponent());
    removeAction.registerCustomShortcutSet(CommonShortcuts.DELETE, myCurrentTab.getComponent());
    myToolBar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, group, true).getComponent();
    myEditor = new FileTemplateConfigurable();
    myEditor.addChangeListener(new ChangeListener() {
      public void stateChanged(ChangeEvent e) {
        onEditorChanged();
      }
    });
    myMainPanel = new JPanel(new GridBagLayout()) {
      public void doLayout() {
        doMainPanelLayout();
      }
    };
    // Layout manager is ignored
    myMainPanel.add(myToolBar,
                    new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, new Insets(2, 2, 2, 2), 0, 0));
    myMainPanel.add(myTabbedPane.getComponent(),
                    new GridBagConstraints(0, 1, 1, 1, 0.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, new Insets(2, 2, 2, 2), 0, 0));
    myEditorComponent = myEditor.createComponent();
    myMainPanel.add(myEditorComponent,
                    new GridBagConstraints(1, 0, 1, 2, 1.0, 1.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(2, 2, 2, 2), 0, 0));
    myMainPanel.setMinimumSize(new Dimension(400, 300));
    myMainPanel.setPreferredSize(new Dimension(700, 500));
    return myMainPanel;
  }

  // Recursively converts a template descriptor (possibly a group) into a
  // tree node for the J2EE tab.
  private FileTemplateTabAsTree.TreeNode createNode(FileTemplateDescriptor descriptor) {
    if (descriptor instanceof FileTemplateGroupDescriptor) {
      FileTemplateDescriptor[] children = ((FileTemplateGroupDescriptor)descriptor).getTemplates();
      FileTemplateTabAsTree.TreeNode[] nodes = new FileTemplateTabAsTree.TreeNode[children.length];
      for (int i = 0; i < nodes.length; i++) {
        nodes[i] = createNode(children[i]);
      }
      return new FileTemplateTabAsTree.TreeNode(((FileTemplateGroupDescriptor)descriptor).getTitle(), descriptor.getIcon(), nodes);
    }
    return new FileTemplateTabAsTree.TreeNode(descriptor.getIcon(), descriptor.getFileName());
  }

  // Reverts the selected template to its bundled default after confirmation.
  private void onReset() {
    FileTemplate selected = myCurrentTab.getSelectedTemplate();
    if (selected != null) {
      if (Messages.showOkCancelDialog("Reset to original template?", "Reset Template", Messages.getQuestionIcon()) !=
          DialogWrapper.OK_EXIT_CODE) {
        return;
      }
      FileTemplateImpl template = (FileTemplateImpl)selected;
      template.resetToDefault();
      myEditor.reset();
      myModified = true;
    }
  }

  private void onEditorChanged() {
    fireListChanged();
  }

  // Keeps myCurrentTab in sync with the tabbed pane and refreshes the editor.
  private void onTabChanged() {
    int selectedIndex = myTabbedPane.getSelectedIndex();
    if (0 <= selectedIndex && selectedIndex < myTabs.length) {
      myCurrentTab = myTabs[selectedIndex];
    }
    onListSelectionChanged();
  }

  // Flushes pending editor changes into the previously selected template,
  // then loads the newly selected one into the editor.
  private void onListSelectionChanged() {
    FileTemplate selectedValue = myCurrentTab.getSelectedTemplate();
    FileTemplate prevTemplate = myEditor == null ? null : myEditor.getTemplate();
    if (prevTemplate != selectedValue) {
      //selection has changed
      if (myEditor.isModified()) {
        try {
          myModified = true;
          myEditor.apply();
          fireListChanged();
        }
        catch (ConfigurationException e) {
          LOG.error(e);
        }
      }
      if (selectedValue == null) {
        myEditor.setTemplate(null, FileTemplateManager.getInstance().getDefaultTemplateDescription());
      }
      else {
        selectTemplate(selectedValue);
      }
    }
  }

  // Loads the template into the editor with the default description matching
  // the current tab, and sets internal-template display hints.
  private void selectTemplate(FileTemplate template) {
    VirtualFile defDesc = null;
    if (myCurrentTab == myTemplatesList) {
      defDesc = FileTemplateManager.getInstance().getDefaultTemplateDescription();
    }
    else if (myCurrentTab == myPatternsList) {
      defDesc = FileTemplateManager.getInstance().getDefaultIncludeDescription();
    }
    if (myEditor.getTemplate() != template) {
      myEditor.setTemplate(template, defDesc);
      final boolean isInternal = isInternalTemplate(template.getName(), myCurrentTab.getTitle());
      myEditor.setShowInternalMessage(isInternal ? " " : null);
      myEditor.setShowAdjustCheckBox(myTemplatesList == myCurrentTab);
    }
  }

  // internal template could not be removed and should be rendered bold
  public static boolean isInternalTemplate(String templateName, String templateTabTitle) {
    if (templateName == null) return false;
    if (Comparing.strEqual(templateTabTitle, TEMPLATES_TITLE)) {
      return Comparing.strEqual(templateName, FileTemplateManager.INTERNAL_CLASS_TEMPLATE_NAME) ||
             Comparing.strEqual(templateName, FileTemplateManager.INTERNAL_INTERFACE_TEMPLATE_NAME) ||
             Comparing.strEqual(templateName, FileTemplateManager.INTERNAL_ENUM_TEMPLATE_NAME) ||
             Comparing.strEqual(templateName, FileTemplateManager.INTERNAL_ANNOTATION_TYPE_TEMPLATE_NAME);
    }
    if (Comparing.strEqual(templateTabTitle, CODE_TITLE)) {
      return true;
    }
    if (Comparing.strEqual(templateTabTitle, J2EE_TITLE)) {
      return true;
    }
    if (Comparing.strEqual(templateTabTitle, INCLUDES_TITLE)) {
      return Comparing.strEqual(templateName, FileTemplateManager.FILE_HEADER_TEMPLATE_NAME);
    }
    return false;
  }

  // Manual layout invoked from the panel's overridden doLayout(): toolbar
  // top-left, tabs below it, editor fills the remaining right side.
  private void doMainPanelLayout() {
    Dimension toolbarPreferredSize = myToolBar.getPreferredSize();
    Dimension mainPanelSize = myMainPanel.getSize();
    Dimension scrollPanePreferedSize = myTabbedPane.getComponent().getPreferredSize();
    if (mainPanelSize.width < 1 || mainPanelSize.height < 1) {
      return;
    }
    int leftWidth = scrollPanePreferedSize.width;
    leftWidth = Math.min(leftWidth, mainPanelSize.width / 5);
    leftWidth = Math.max(leftWidth, 300); //to prevent tabs from scrolling
    //todo[myakovlev] Calculate tabs preferred size
    leftWidth = Math.max(leftWidth, toolbarPreferredSize.width);
    int x = 2;
    int y = 2;
    int width = toolbarPreferredSize.width;
    int height = toolbarPreferredSize.height;
    myToolBar.setBounds(x, y, width, height);
    y += height + 2;
    width = leftWidth + 2;
    height = Math.max(1, mainPanelSize.height - 2 - y);
    myTabbedPane.getComponent().setBounds(x, y, width, height);
    x += width + 4;
    y = 2;
    width = Math.max(1, mainPanelSize.width - 2 - x);
    height = Math.max(1, mainPanelSize.height - 2 - y);
    myEditorComponent.setBounds(x, y, width, height);
    myEditorComponent.revalidate();
  }

  // (Re)populates all four tabs from the template manager's current state.
  private void initLists() {
    FileTemplateManager templateManager = FileTemplateManager.getInstance();
    FileTemplate[] templates = templateManager.getAllTemplates();
    FileTemplate[] internals = templateManager.getInternalTemplates();
    FileTemplate[] templatesAndInternals = ArrayUtil.mergeArrays(internals, templates, FileTemplate.class);
    myTemplatesList.init(templatesAndInternals);
    myPatternsList.init(templateManager.getAllPatterns());
    myCodeTemplatesList.init(templateManager.getAllCodeTemplates());
    myJ2eeTemplatesList.init(templateManager.getAllJ2eeTemplates());
  }

  public boolean isModified() {
    return myModified || myEditor.isModified();
  }

  /**
   * If apply is acceptable, returns true. If no, returns false and fills error string.
   */
  public boolean canApply(final boolean showErrorDialog, String[] errorString) {
    for (int i = 0; i < myTabs.length; i++) {
      FileTemplateTab list = myTabs[i];
      if (!canApply(showErrorDialog, errorString, list)) return false;
    }
    return true;
  }

  // Validates one tab: non-internal templates need a non-empty unique name and
  // a non-empty extension; on failure selects the offender and fills errorString[0].
  public boolean canApply(final boolean showErrorDialog, String[] errorString, FileTemplateTab list) {
    // NOTE(review): this reads myCurrentTab rather than the `list` parameter,
    // while the error path below switches to and selects within `list` — looks
    // like it should be list.getTemplates(); confirm before changing.
    final FileTemplate[] templates = myCurrentTab.getTemplates();
    ArrayList allNames = new ArrayList();
    FileTemplate itemWithError = null;
    String errorMessage = null;
    String errorTitle = null;
    boolean errorInName = true;
    for (int i = 0; i < templates.length; i++) {
      FileTemplate template = templates[i];
      boolean isClassTemplate = Comparing.strEqual(template.getName(), FileTemplateManager.INTERNAL_CLASS_TEMPLATE_NAME);
      boolean isInterfaceTemplate = Comparing.strEqual(template.getName(), FileTemplateManager.INTERNAL_INTERFACE_TEMPLATE_NAME);
      if (isClassTemplate || isInterfaceTemplate) continue;
      String currName = template.getName();
      String currExt = template.getExtension();
      if (currName.length() == 0) {
        itemWithError = template;
        errorMessage = "Please specify a name for this template";
        errorTitle = "Template Name Not Specified";
        errorString[0] = "Please specify template name";
        break;
      }
      if (allNames.contains(currName)) {
        itemWithError = template;
        errorMessage = "Please specify a different name for this template";
        errorTitle = "Template already exists";
        errorString[0] = "Template with such name already exists. Please specify a different template name";
        break;
      }
      if (currExt.length() == 0) {
        itemWithError = template;
        errorMessage = "Please specify an extension for this template";
        errorTitle = "Template Extension Not Specified";
        errorString[0] = "Please specify template extension";
        errorInName = false;
        break;
      }
      allNames.add(currName);
    }
    if (itemWithError == null) {
      return true;
    }
    else {
      final String _errorString = errorMessage;
      final String _errorTitle = errorTitle;
      final boolean _errorInName = errorInName;
      myTabbedPane.setSelectedIndex(Arrays.asList(myTabs).indexOf(list));
      selectTemplate(itemWithError);
      list.selectTemplate(itemWithError);
      ApplicationManager.getApplication().invokeLater(new Runnable() {
        public void run() {
          if (showErrorDialog) {
            Messages.showMessageDialog(myMainPanel, _errorString, _errorTitle, Messages.getErrorIcon());
          }
          if (_errorInName) {
            myEditor.focusToNameField();
          }
          else {
            myEditor.focusToExtensionField();
          }
        }
      });
      return false;
    }
  }

  private void fireListChanged() {
    myCurrentTab.fireDataChanged();
    if (myMainPanel != null) {
      myMainPanel.revalidate();
    }
  }

  // Commits all tabs to FileTemplateManager; throws ConfigurationException
  // (with the validation message) when canApply() fails.
  public void apply() throws ConfigurationException {
    if (myEditor != null && myEditor.isModified()) {
      myModified = true;
      myEditor.apply();
    }
    String[] errorString = new String[1];
    if (!canApply(false, errorString)) {
      throw new ConfigurationException(errorString[0]);
    }
    // Apply templates
    ArrayList newModifiedItems = new ArrayList();
    FileTemplate[] templates = myTemplatesList.getTemplates();
    for (int i = 0; i < templates.length; i++) {
      FileTemplate template = templates[i];
      newModifiedItems.add(template);
    }
    FileTemplateManager templatesManager = FileTemplateManager.getInstance();
    apply(newModifiedItems, myTemplatesList.savedTemplates, TEMPLATE_ID, templatesManager.getAllTemplates());
    // Apply patterns
    newModifiedItems = new ArrayList();
    templates = myPatternsList.getTemplates();
    for (int i = 0; i < templates.length; i++) {
      FileTemplate template = templates[i];
      newModifiedItems.add(template);
    }
    apply(newModifiedItems, myPatternsList.savedTemplates, PATTERN_ID, templatesManager.getAllPatterns());
    //Apply code templates
    newModifiedItems = new ArrayList();
    templates = myCodeTemplatesList.getTemplates();
    for (int i = 0; i < templates.length; i++) {
      FileTemplate template = templates[i];
      newModifiedItems.add(template);
    }
    apply(newModifiedItems, myCodeTemplatesList.savedTemplates, CODE_ID, templatesManager.getAllCodeTemplates());
    //Apply J2EE templates
    newModifiedItems = new ArrayList();
    templates = myJ2eeTemplatesList.getTemplates();
    for (int i = 0; i < templates.length; i++) {
      FileTemplate template = templates[i];
      newModifiedItems.add(template);
    }
    apply(newModifiedItems, myJ2eeTemplatesList.savedTemplates, J2EE_ID, templatesManager.getAllJ2eeTemplates());
    FileTemplateManager.getInstance().saveAll();
    if (myEditor != null) {
      myModified = false;
      fireListChanged();
      reset();
    }
  }

  // Routes a removal to the manager method matching the tab id.
  private static void removeTemplate(FileTemplate aTemplate, int listId, boolean fromDiskOnly) {
    FileTemplateManager manager = FileTemplateManager.getInstance();
    if (listId == AllFileTemplatesConfigurable.TEMPLATE_ID) {
      if (!aTemplate.isInternal()) {
        manager.removeTemplate(aTemplate, fromDiskOnly);
      }
      else {
        manager.removeInternal(aTemplate);
      }
    }
    else if (listId == PATTERN_ID) {
      manager.removePattern(aTemplate, fromDiskOnly);
    }
    else if (listId == CODE_ID) {
      manager.removeCodeTemplate(aTemplate, fromDiskOnly);
    }
    else if (listId == J2EE_ID) {
      manager.removeJ2eeTemplate(aTemplate, fromDiskOnly);
    }
  }

  // Reconciles one tab's edited templates with the manager's table:
  // removes deleted templates, copies edits into survivors, adds new ones.
  private static void apply(ArrayList newModifiedItems, Map savedTemplate2ModifiedTemplate, int listId, FileTemplate[] templates) {
    FileTemplateManager templatesManager = FileTemplateManager.getInstance();
    if (listId == TEMPLATE_ID) {
      FileTemplate[] internals = templatesManager.getInternalTemplates();
      templates = ArrayUtil.mergeArrays(internals, templates, FileTemplate.class);
    }
    ArrayList savedTemplates = new ArrayList();
    // Delete removed and fill savedTemplates
    for (int i = 0; i < templates.length; i++) {
      FileTemplate aTemplate = templates[i];
      FileTemplate aModifiedTemplate = (FileTemplate)savedTemplate2ModifiedTemplate.get(aTemplate);
      if (newModifiedItems.contains(aModifiedTemplate)) {
        savedTemplates.add(aTemplate);
      }
      else {
        removeTemplate(aTemplate, listId, false);
        savedTemplate2ModifiedTemplate.remove(aTemplate);
      }
    }
    // Now all removed templates deleted from table, savedTemplates contains all templates in table
    for (Iterator iterator = savedTemplates.iterator(); iterator.hasNext();) {
      FileTemplate aTemplate = (FileTemplate)iterator.next();
      FileTemplate aModifiedTemplate = (FileTemplate)savedTemplate2ModifiedTemplate.get(aTemplate);
      LOG.assertTrue(aModifiedTemplate != null);
      aTemplate.setAdjust(aModifiedTemplate.isAdjust());
      if (!aModifiedTemplate.isDefault()) {
        FileTemplateUtil.copyTemplate(aModifiedTemplate, aTemplate);
      }
      else {
        if (!aTemplate.isDefault()) {
          // Edited copy reverted to default: drop only the customized disk copy.
          removeTemplate(aTemplate, listId, true);
        }
      }
    }
    // Add new templates to table
    for (Iterator iterator = newModifiedItems.iterator(); iterator.hasNext();) {
      FileTemplate aModifiedTemplate = (FileTemplate)iterator.next();
      LOG.assertTrue(aModifiedTemplate != null);
      if (!savedTemplate2ModifiedTemplate.containsValue(aModifiedTemplate)) {
        if (listId == AllFileTemplatesConfigurable.TEMPLATE_ID) {
          templatesManager.addTemplate(aModifiedTemplate.getName(), aModifiedTemplate.getExtension()).setText(aModifiedTemplate.getText());
        }
        else if (listId == AllFileTemplatesConfigurable.PATTERN_ID) {
          templatesManager.addPattern(aModifiedTemplate.getName(), aModifiedTemplate.getExtension()).setText(aModifiedTemplate.getText());
        }
        else if (listId == CODE_ID) {
          templatesManager.addCodeTemplate(aModifiedTemplate.getName(), aModifiedTemplate.getExtension()).setText(aModifiedTemplate.getText());
        }
        else if (listId == J2EE_ID) {
          templatesManager.addJ2eeTemplate(aModifiedTemplate.getName(), aModifiedTemplate.getExtension()).setText(aModifiedTemplate.getText());
        }
      }
    }
  }

  // Discards pending edits and reloads all tabs from the manager.
  // NOTE: this copy is truncated at the end of the visible chunk; the
  // statement continues beyond this point.
  public void reset() {
    myEditor.reset();
    initLists();
    myModified
= false; } public void disposeUIResources() { if (myEditor != null) { myEditor.disposeUIResources(); myEditor = null; myEditorComponent = null; } if (myTabbedPane != null) { myTabbedPane.uninstallKeyboardNavigation(); } myMainPanel = null; } public JComponent getPreferredFocusedComponent() { return myCurrentTab.getComponent(); } public void createNewTemplate(String preferredName, String extension, String text) { createTemplate(preferredName, extension, text); } }
Adding node for jspx file template
source/com/intellij/ide/fileTemplates/impl/AllFileTemplatesConfigurable.java
Adding node for jspx file template
<ide><path>ource/com/intellij/ide/fileTemplates/impl/AllFileTemplatesConfigurable.java <ide> new TreeNode(StdFileTypes.XML.getIcon(), J2EEFileTemplateNames.WEB_XML_24), <ide> }), <ide> new TreeNode("Jsp files", StdFileTypes.JSP.getIcon(), new TreeNode[]{ <del> new TreeNode(StdFileTypes.JSP.getIcon(), J2EEFileTemplateNames.JSP_FILE) <add> new TreeNode(StdFileTypes.JSP.getIcon(), J2EEFileTemplateNames.JSP_FILE), <add> new TreeNode(StdFileTypes.JSPX.getIcon(), J2EEFileTemplateNames.JSPX_FILE) <ide> }), <ide> })); <ide>
Java
apache-2.0
558258be48bbb3a8f6dcc24ae6cf4748546d8f56
0
leopardoooo/cambodia,leopardoooo/cambodia,leopardoooo/cambodia,leopardoooo/cambodia
package com.ycsoft.business.component.core; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.springframework.beans.BeanUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.google.gson.Gson; import com.ycsoft.beans.core.common.CDoneCode; import com.ycsoft.beans.core.common.CDoneCodeDetail; import com.ycsoft.beans.core.common.CDoneCodeInfo; import com.ycsoft.beans.core.fee.CFee; import com.ycsoft.beans.core.user.CUser; import com.ycsoft.business.commons.abstracts.BaseBusiComponent; import com.ycsoft.business.dao.config.TBusiConfirmDao; import com.ycsoft.business.dao.core.common.CDoneCodeInfoDao; import com.ycsoft.business.dao.core.common.CDoneCodeUnpayDao; import com.ycsoft.business.dao.core.cust.CCustDao; import com.ycsoft.business.dao.core.fee.CFeeDao; import com.ycsoft.business.dao.core.user.CUserDao; import com.ycsoft.business.dao.core.user.CUserHisDao; import com.ycsoft.business.dto.core.cust.DoneCodeDto; import com.ycsoft.business.dto.core.cust.DoneCodeExtAttrDto; import com.ycsoft.business.dto.core.cust.DoneInfoDto; import com.ycsoft.business.dto.core.cust.ExtAttributeDto; import com.ycsoft.business.dto.core.fee.FeeDto; import com.ycsoft.business.dto.core.fee.QueryFeeInfo; import com.ycsoft.commons.constants.BusiCodeConstants; import com.ycsoft.commons.constants.StatusConstants; import com.ycsoft.commons.constants.SystemConstants; import com.ycsoft.commons.exception.ComponentException; import com.ycsoft.commons.helper.CollectionHelper; import com.ycsoft.commons.helper.StringHelper; import com.ycsoft.daos.core.JDBCException; import com.ycsoft.daos.core.Pager; /** * 通用业务组件,包括以下功能 * 1、保存业务流水及流水明细 * 2、保存需要打印的业务单据 * * @author pyb * * Mar 16, 2010 * */ @Component public class DoneCodeComponent extends BaseBusiComponent { private CDoneCodeInfoDao cDoneCodeInfoDao; private TBusiConfirmDao tBusiConfirmDao; 
private CDoneCodeUnpayDao cDoneCodeUnpayDao; @Autowired private CCustDao cCustDao; @Autowired private CFeeDao cFeeDao; @Autowired private CUserDao cUserDao; @Autowired private CUserHisDao cUserHisDao; /** * 给业务增加用户锁,防止并发临界时数据不一致。 * @param cust_id * @throws JDBCException */ public boolean lockCust(String cust_id) throws JDBCException{ if( cCustDao.lockCust(cust_id)==null){ return false; }else{ return true; } } public void saveDoneCodeDetail(Integer doneCode, String custId, List<String> userIdList) throws Exception { CDoneCodeDetail[] detail = new CDoneCodeDetail[userIdList.size()]; for( int i=0;i<userIdList.size();i++){ detail[i] = new CDoneCodeDetail(); detail[i].setDone_code(doneCode); detail[i].setCust_id(custId); detail[i].setUser_id(userIdList.get(i)); detail[i].setArea_id(getOptr().getArea_id()); detail[i].setCounty_id(getOptr().getCounty_id()); } cDoneCodeDetailDao.save(detail); } public void saveDoneCode(Integer doneCode, String busiCode, String addrId) throws Exception { CDoneCode cDoneCode = new CDoneCode(); cDoneCode.setDone_code(doneCode); cDoneCode.setBusi_code(busiCode); cDoneCode.setStatus(StatusConstants.ACTIVE); cDoneCode.setAddr_id(addrId); cDoneCode.setService_channel(SystemConstants.SERVICE_CHANNEL_YYT); setBaseInfo(cDoneCode); cDoneCodeDao.save(cDoneCode); } /** * 保存未支付业务 * @param cust_id * @param done_code * @throws JDBCException */ public void saveDoneCodeUnPay(String cust_id,Integer done_code,String optr_id) throws Exception{ if(cDoneCodeUnpayDao.findByKey(done_code)!=null){ return;//已经保存过,不需要重复保存 } cDoneCodeUnpayDao.saveUnpay(cust_id, done_code,optr_id); } /** * 业务被其他营业员锁定检查 * @param cust_id * @param done_code * @param optr_id * @throws Exception public void checkUnPayOtherLock(String cust_id,String optr_id)throws Exception{ List<CDoneCodeUnpay> otherLocks=cDoneCodeUnpayDao.queryUnPayOtherLock(cust_id,optr_id); if(otherLocks!=null&&otherLocks.size()>0){ String login_name = MemoryDict.getDictName(DictKey.OPTR_LOGIN, 
otherLocks.get(0).getOptr_id()); String optr_name=MemoryDict.getDictName(DictKey.OPTR, otherLocks.get(0).getOptr_id()); throw new ComponentException(ErrorCode.UnPayLock,optr_name,login_name); } } */ /** * * @param doneCode * @return * @throws JDBCException public CDoneCodeUnpay queryDoneCodeUnPayByKey(Integer doneCode) throws JDBCException{ return cDoneCodeUnpayDao.findByKey(doneCode); } */ /** * 加锁查询未支付业务 * @param cust_id * @return * @throws JDBCException public List<CDoneCodeUnpay> queryUnPayList(String cust_id) throws JDBCException{ return cDoneCodeUnpayDao.queryUnPay(cust_id); } */ /** * 查询一个营业员的未支付业务 * @param optr_id * @return * @throws JDBCException public List<CDoneCodeUnpay> queryUnPayByOptr(String optr_id) throws JDBCException{ return cDoneCodeUnpayDao.queryUnPayByOptr(optr_id); } */ /** * 删除未支付业务信息 * @param unPayList * @throws JDBCException */ public void deleteDoneCodeUnPay(List<FeeDto> feeList) throws JDBCException{ Set<Integer> doneCodeSet=new HashSet<>(); for(FeeDto fee:feeList){ doneCodeSet.add(fee.getCreate_done_code()); } for(Integer doneCode:doneCodeSet){ List<CFee> list=cFeeDao.queryUnPayByDoneCode(doneCode); if(list==null||list.size()==0){ //不存在未支付则删除未支付业务流水号 cDoneCodeUnpayDao.remove(doneCode); } } } public void updateStatus(Integer doneCode,String busiCode) throws Exception{ CDoneCode cDoneCode = new CDoneCode(); cDoneCode.setDone_code(doneCode); cDoneCode.setStatus(StatusConstants.INVALID); cDoneCodeDao.update(cDoneCode); cDoneCodeDao.saveCancel(doneCode,busiCode); } /** * 获取业务流水 * */ public Integer gDoneCode() throws Exception{ return Integer.parseInt(cDoneCodeDao.findSequence().toString()); } public CDoneCode queryByKey(Integer doneCode) throws Exception{ return cDoneCodeDao.findByKey(doneCode); } public Pager<DoneInfoDto> getUserOpenDate(Integer cDoneCode, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getUserOpenDate(cDoneCode, start, limit); } public Pager<DoneInfoDto> getOrderProdDate(Integer cDoneCode,String 
countyId, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getOrderProdDate(cDoneCode, countyId, start, limit); } public Pager<DoneInfoDto> getBandUpgradeDate(Integer cDoneCode,String countyId, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getBandUpgradeDate(cDoneCode, countyId, start, limit); } public Pager<DoneInfoDto> getDeviceChangeDate(Integer cDoneCode,String countyId, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getDeviceChangeDate(cDoneCode, countyId, start, limit); } public Pager<DoneInfoDto> getDeviceBuyDate(Integer cDoneCode,String countyId, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getDeviceBuyDate(cDoneCode, countyId, start, limit); } public Pager<DoneInfoDto> getPromotionDate(Integer cDoneCode,String countyId, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getPromotionDate(cDoneCode, countyId, start, limit); } public Pager<DoneInfoDto> getPromFeeDate(Integer cDoneCode,String countyId, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getPromFeeDate(cDoneCode, countyId, start, limit); } /** * 查找donecode之前可以回退且不可忽略的记录条数 * @param doneCode * @param custId * @return * @throws Exception */ public int getNeedCancleCount(Integer doneCode,String custId) throws Exception{ List<CDoneCode> list = cDoneCodeDao.queryAfterDoneCode(doneCode, custId, getOptr().getCounty_id()); return list== null?0:list.size(); } /** * 根据用户ID, 查询用户受理记录 * @param userId */ public List<CDoneCode> queryByUserId(String userId) throws Exception{ List<CDoneCode> doneCodeList = cDoneCodeDao.queryUserDoneCode(userId, getOptr().getCounty_id()); return doneCodeList; } /** * 查询用户几个月内临时授权的次数 * @param userId * @param months * @return * @throws Exception */ public int queryOpenTempTimes(String userId,int months) throws Exception{ return cDoneCodeDao.queryOpenTempTimes(userId, months,getOptr().getCounty_id()); } /** * 通过客户ID,查询客户受理记录,并关联业务的扩展信息 * @param custId */ 
public Pager<DoneCodeExtAttrDto> queryByCustId(String custId, QueryFeeInfo queryFeeInfo, Integer start,Integer limit)throws Exception{ Pager<DoneCodeDto> pageLstDone = cDoneCodeDao.queryCustDoneCode(custId, queryFeeInfo, getOptr().getCounty_id(),start,limit); Pager<DoneCodeExtAttrDto> pageTarget = new Pager<DoneCodeExtAttrDto>(); List<DoneCodeExtAttrDto> target = new ArrayList<DoneCodeExtAttrDto>(); pageTarget.setRecords(target); pageTarget.setStart(pageLstDone.getStart()); pageTarget.setLimit(pageLstDone.getLimit()); pageTarget.setTotalProperty(pageLstDone.getTotalProperty()); ExtAttributeDto temp = null ; DoneCodeExtAttrDto tempQ = null; List<DoneCodeDto> lstDone = pageLstDone.getRecords(); String[] doneCodeArr = CollectionHelper.converValueToArray(lstDone, "done_code"); List<DoneCodeDto> queryCfeeByDoneCode = cDoneCodeDao.queryCfeeByDoneCode(doneCodeArr,custId); Map<String, List<DoneCodeDto>> map2 = CollectionHelper.converToMap(queryCfeeByDoneCode, "reverse_done_code"); for (DoneCodeDto doneCodeDto : lstDone) { temp = new ExtAttributeDto( doneCodeDto ); tempQ = null; for (DoneCodeExtAttrDto q : target) { if(q.getDone_code().equals(doneCodeDto.getDone_code())){ tempQ = q; break; } } if(null == tempQ){ tempQ = new DoneCodeExtAttrDto(); //增加作废的负金额 List<DoneCodeDto> list = map2.get(doneCodeDto.getDone_code().toString()); if(CollectionHelper.isNotEmpty(list)){ Integer realPay = 0; for(DoneCodeDto d:list){ realPay += d.getReal_pay(); } doneCodeDto.setReal_pay(0-realPay); } BeanUtils.copyProperties(doneCodeDto, tempQ); target.add(tempQ); } if(StringHelper.isNotEmpty(doneCodeDto.getAttribute_id())){ StringBuffer str = new StringBuffer(); if(StringHelper.isNotEmpty(temp.getAttribute_value())){ str.append("["+temp.getAttribute_name()+"]:"+temp.getAttribute_value()+";"); if(StringHelper.isNotEmpty(tempQ.getAttr_remark())){ str.append(tempQ.getAttr_remark()); } tempQ.setAttr_remark(str.toString()); } tempQ.getExtAttrs().add(temp); } 
if(doneCodeDto.getBusi_code().equals(BusiCodeConstants.USER_OPEN)){ CUser user = cUserDao.findByKey(doneCodeDto.getUser_id()); if(user == null ){ user = (CUser)cUserHisDao.findByKey(doneCodeDto.getUser_id()); } String str = ""; if(user!=null){ if(str.equals(SystemConstants.USER_TYPE_BAND) && StringHelper.isNotEmpty(user.getModem_mac())){ str += user.getUser_type()+" Modem: "+user.getModem_mac(); }else if(StringHelper.isNotEmpty(user.getStb_id())){ str += user.getUser_type()+" Stb: "+user.getStb_id(); } } tempQ.setRemark(str); } } return pageTarget; } public List<CDoneCodeDetail> queryDetail(Integer doneCode) throws Exception{ return cDoneCodeDetailDao.queryDetail(doneCode); } public void saveDoneCodeInfo(Integer doneCode, String custId, String userId,Object info) throws Exception { CDoneCodeInfo doneCodeInfo = new CDoneCodeInfo(); doneCodeInfo.setDone_code(doneCode); doneCodeInfo.setArea_id(getOptr().getArea_id()); doneCodeInfo.setCounty_id(getOptr().getCounty_id()); doneCodeInfo.setUser_id(userId); if(info == null){ doneCodeInfo.setInfo(""); }else{ String strInfo = new Gson().toJson(info); if(strInfo.length()<3000){ doneCodeInfo.setInfo(strInfo); }else{ int mNum = (strInfo.length()%3000); int strNum = (strInfo.length()/3000); if(mNum>0){ strNum++; } if(strNum>11){ throw new Exception("所选用户数太多,无法生成业务单,请重新选择用户进行业务操作!"); } for(int i=1;i<=strNum;i++){ if(i==strNum){ if(i==1) doneCodeInfo.setInfo(strInfo); if(i==2) doneCodeInfo.setInfo1(strInfo.substring((i-1)*3000)); if(i==3) doneCodeInfo.setInfo2(strInfo.substring((i-1)*3000)); if(i==4) doneCodeInfo.setInfo3(strInfo.substring((i-1)*3000)); if(i==5) doneCodeInfo.setInfo4(strInfo.substring((i-1)*3000)); if(i==6) doneCodeInfo.setInfo5(strInfo.substring((i-1)*3000)); if(i==6) doneCodeInfo.setInfo6(strInfo.substring((i-1)*3000)); if(i==8) doneCodeInfo.setInfo7(strInfo.substring((i-1)*3000)); if(i==9) doneCodeInfo.setInfo8(strInfo.substring((i-1)*3000)); if(i==10) doneCodeInfo.setInfo9(strInfo.substring((i-1)*3000)); 
if(i==11) doneCodeInfo.setInfo10(strInfo.substring((i-1)*3000)); }else{ if(i==1) doneCodeInfo.setInfo(strInfo.substring(0,i*3000)); if(i==2) doneCodeInfo.setInfo1(strInfo.substring((i-1)*3000,i*3000)); if(i==3) doneCodeInfo.setInfo2(strInfo.substring((i-1)*3000,i*3000)); if(i==4) doneCodeInfo.setInfo3(strInfo.substring((i-1)*3000,i*3000)); if(i==5) doneCodeInfo.setInfo4(strInfo.substring((i-1)*3000,i*3000)); if(i==6) doneCodeInfo.setInfo5(strInfo.substring((i-1)*4000,i*4000)); if(i==7) doneCodeInfo.setInfo6(strInfo.substring((i-1)*3000,i*3000)); if(i==8) doneCodeInfo.setInfo7(strInfo.substring((i-1)*3000,i*3000)); if(i==9) doneCodeInfo.setInfo8(strInfo.substring((i-1)*3000,i*3000)); if(i==10) doneCodeInfo.setInfo9(strInfo.substring((i-1)*3000,i*3000)); if(i==11) doneCodeInfo.setInfo10(strInfo.substring((i-1)*3000,i*3000)); } } } } doneCodeInfo.setCust_id(custId); cDoneCodeInfoDao.save(doneCodeInfo); } /** * 保存业务流水 * @param doneCode 流水号 * @param busiCode 业务编号 * @param custId 客户编号 * @param userIds 用户编号数组 * @throws Exception */ public void saveDoneCode(Integer doneCode,String busiCode,String remark, String deptId, String countyId, String areaId,String custId, List<String> userIds) throws Exception{ if (StringHelper.isEmpty(busiCode)) throw new ComponentException("业务代码为空"); if (0 == doneCode) throw new ComponentException("业务流水为空"); //保存流水 CDoneCode cDoneCode = new CDoneCode(); cDoneCode.setDone_code(doneCode); cDoneCode.setBusi_code(busiCode); cDoneCode.setStatus(StatusConstants.ACTIVE); cDoneCode.setRemark(remark); cDoneCode.setCounty_id(countyId); cDoneCode.setArea_id(areaId); cDoneCode.setDept_id(deptId); cDoneCode.setOptr_id(getOptr().getOptr_id()); cDoneCodeDao.save(cDoneCode); //保存流水明细 if (StringHelper.isNotEmpty(custId)){ if (userIds != null && userIds.size() > 0) { CDoneCodeDetail[] detail = new CDoneCodeDetail[userIds.size()]; for( int i=0;i<userIds.size();i++){ detail[i] = new CDoneCodeDetail(); detail[i].setDone_code(doneCode); detail[i].setCust_id(custId); 
detail[i].setUser_id(userIds.get(i).toString()); detail[i].setArea_id(areaId); detail[i].setCounty_id(countyId); } cDoneCodeDetailDao.save(detail); } else { CDoneCodeDetail detail = new CDoneCodeDetail(); detail.setDone_code(doneCode); detail.setCust_id(custId); detail.setArea_id(areaId); detail.setCounty_id(countyId); cDoneCodeDetailDao.save(detail); } } } /** * 保存业务流水 * @param doneCode 流水号 * @param busiCode 业务编号 * @param custId 客户编号 * @param userIds 用户编号数组 * @throws Exception */ public void saveDoneCode(Integer doneCode,String busiCode,String remark ,String custId, List<String> userIds,String addr_id,String service_channel) throws Exception{ if (StringHelper.isEmpty(busiCode)) throw new ComponentException("业务代码为空"); if (0 == doneCode) throw new ComponentException("业务流水为空"); // if (StringHelper.isEmpty(custId)) // throw new ComponentException("客户id为空"); //保存流水 CDoneCode cDoneCode = new CDoneCode(); cDoneCode.setDone_code(doneCode); cDoneCode.setBusi_code(busiCode); cDoneCode.setStatus(StatusConstants.ACTIVE); cDoneCode.setRemark(remark); cDoneCode.setAddr_id(addr_id); cDoneCode.setService_channel(service_channel); setBaseInfo(cDoneCode); cDoneCodeDao.save(cDoneCode); //保存流水明细 if (StringHelper.isNotEmpty(custId)){ if (userIds != null && userIds.size() > 0) { CDoneCodeDetail[] detail = new CDoneCodeDetail[userIds.size()]; for( int i=0;i<userIds.size();i++){ detail[i] = new CDoneCodeDetail(); detail[i].setDone_code(doneCode); detail[i].setCust_id(custId); if(StringHelper.isNotEmpty(userIds.get(i))){//lxr临时添加 detail[i].setUser_id(userIds.get(i).toString()); } detail[i].setArea_id(getOptr().getArea_id()); detail[i].setCounty_id(getOptr().getCounty_id()); } cDoneCodeDetailDao.save(detail); } else { CDoneCodeDetail detail = new CDoneCodeDetail(); detail.setDone_code(doneCode); detail.setCust_id(custId); detail.setArea_id(getOptr().getArea_id()); detail.setCounty_id(getOptr().getCounty_id()); cDoneCodeDetailDao.save(detail); } } } public void saveBatchDoneCode(Integer 
doneCode,String busiCode,String remark ,List<String> custIds, List<String> userIds) throws Exception{ if (StringHelper.isEmpty(busiCode)) throw new ComponentException("业务代码为空"); if (0 == doneCode) throw new ComponentException("业务流水为空"); // if (StringHelper.isEmpty(custId)) // throw new ComponentException("客户id为空"); //保存流水 CDoneCode cDoneCode = new CDoneCode(); cDoneCode.setDone_code(doneCode); cDoneCode.setBusi_code(busiCode); cDoneCode.setStatus(StatusConstants.ACTIVE); cDoneCode.setRemark(remark); setBaseInfo(cDoneCode); cDoneCodeDao.save(cDoneCode); //保存流水明细 if (custIds != null && custIds.size() > 0){ CDoneCodeDetail[] detail = new CDoneCodeDetail[custIds.size()]; if(userIds != null && userIds.size() > 0){ for( int i=0;i<custIds.size();i++){ detail[i] = new CDoneCodeDetail(); detail[i].setDone_code(doneCode); detail[i].setCust_id(custIds.get(i)); detail[i].setUser_id(userIds.get(i)); detail[i].setArea_id(getOptr().getArea_id()); detail[i].setCounty_id(getOptr().getCounty_id()); } }else{ for( int i=0;i<custIds.size();i++){ detail[i] = new CDoneCodeDetail(); detail[i].setDone_code(doneCode); detail[i].setCust_id(custIds.get(i)); detail[i].setArea_id(getOptr().getArea_id()); detail[i].setCounty_id(getOptr().getCounty_id()); } } cDoneCodeDetailDao.save(detail); } } public void editRemark(int doneCode,String remark) throws Exception { cDoneCodeDao.updateRemark(doneCode, remark); } public void cancelDoneCode(Integer doneCode) throws Exception{ cDoneCodeDao.delete(doneCode); } /** * @param doneCodeInfoDao the cDoneCodeInfoDao to set */ public void setCDoneCodeInfoDao(CDoneCodeInfoDao doneCodeInfoDao) { cDoneCodeInfoDao = doneCodeInfoDao; } public void setTBusiConfirmDao(TBusiConfirmDao tBusiConfirmDao) { this.tBusiConfirmDao = tBusiConfirmDao; } public void setCDoneCodeUnpayDao(CDoneCodeUnpayDao cDoneCodeUnpayDao) { this.cDoneCodeUnpayDao = cDoneCodeUnpayDao; } }
boss-core/src/main/java/com/ycsoft/business/component/core/DoneCodeComponent.java
package com.ycsoft.business.component.core; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.springframework.beans.BeanUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import com.google.gson.Gson; import com.ycsoft.beans.core.common.CDoneCode; import com.ycsoft.beans.core.common.CDoneCodeDetail; import com.ycsoft.beans.core.common.CDoneCodeInfo; import com.ycsoft.beans.core.fee.CFee; import com.ycsoft.beans.core.user.CUser; import com.ycsoft.business.commons.abstracts.BaseBusiComponent; import com.ycsoft.business.dao.config.TBusiConfirmDao; import com.ycsoft.business.dao.core.common.CDoneCodeInfoDao; import com.ycsoft.business.dao.core.common.CDoneCodeUnpayDao; import com.ycsoft.business.dao.core.cust.CCustDao; import com.ycsoft.business.dao.core.fee.CFeeDao; import com.ycsoft.business.dao.core.user.CUserDao; import com.ycsoft.business.dao.core.user.CUserHisDao; import com.ycsoft.business.dto.core.cust.DoneCodeDto; import com.ycsoft.business.dto.core.cust.DoneCodeExtAttrDto; import com.ycsoft.business.dto.core.cust.DoneInfoDto; import com.ycsoft.business.dto.core.cust.ExtAttributeDto; import com.ycsoft.business.dto.core.fee.FeeDto; import com.ycsoft.business.dto.core.fee.QueryFeeInfo; import com.ycsoft.commons.constants.BusiCodeConstants; import com.ycsoft.commons.constants.StatusConstants; import com.ycsoft.commons.constants.SystemConstants; import com.ycsoft.commons.exception.ComponentException; import com.ycsoft.commons.helper.CollectionHelper; import com.ycsoft.commons.helper.StringHelper; import com.ycsoft.daos.core.JDBCException; import com.ycsoft.daos.core.Pager; /** * 通用业务组件,包括以下功能 * 1、保存业务流水及流水明细 * 2、保存需要打印的业务单据 * * @author pyb * * Mar 16, 2010 * */ @Component public class DoneCodeComponent extends BaseBusiComponent { private CDoneCodeInfoDao cDoneCodeInfoDao; private TBusiConfirmDao tBusiConfirmDao; 
private CDoneCodeUnpayDao cDoneCodeUnpayDao; @Autowired private CCustDao cCustDao; @Autowired private CFeeDao cFeeDao; @Autowired private CUserDao cUserDao; @Autowired private CUserHisDao cUserHisDao; /** * 给业务增加用户锁,防止并发临界时数据不一致。 * @param cust_id * @throws JDBCException */ public boolean lockCust(String cust_id) throws JDBCException{ if( cCustDao.lockCust(cust_id)==null){ return false; }else{ return true; } } /** * 保存未支付业务 * @param cust_id * @param done_code * @throws JDBCException */ public void saveDoneCodeUnPay(String cust_id,Integer done_code,String optr_id) throws Exception{ if(cDoneCodeUnpayDao.findByKey(done_code)!=null){ return;//已经保存过,不需要重复保存 } cDoneCodeUnpayDao.saveUnpay(cust_id, done_code,optr_id); } /** * 业务被其他营业员锁定检查 * @param cust_id * @param done_code * @param optr_id * @throws Exception public void checkUnPayOtherLock(String cust_id,String optr_id)throws Exception{ List<CDoneCodeUnpay> otherLocks=cDoneCodeUnpayDao.queryUnPayOtherLock(cust_id,optr_id); if(otherLocks!=null&&otherLocks.size()>0){ String login_name = MemoryDict.getDictName(DictKey.OPTR_LOGIN, otherLocks.get(0).getOptr_id()); String optr_name=MemoryDict.getDictName(DictKey.OPTR, otherLocks.get(0).getOptr_id()); throw new ComponentException(ErrorCode.UnPayLock,optr_name,login_name); } } */ /** * * @param doneCode * @return * @throws JDBCException public CDoneCodeUnpay queryDoneCodeUnPayByKey(Integer doneCode) throws JDBCException{ return cDoneCodeUnpayDao.findByKey(doneCode); } */ /** * 加锁查询未支付业务 * @param cust_id * @return * @throws JDBCException public List<CDoneCodeUnpay> queryUnPayList(String cust_id) throws JDBCException{ return cDoneCodeUnpayDao.queryUnPay(cust_id); } */ /** * 查询一个营业员的未支付业务 * @param optr_id * @return * @throws JDBCException public List<CDoneCodeUnpay> queryUnPayByOptr(String optr_id) throws JDBCException{ return cDoneCodeUnpayDao.queryUnPayByOptr(optr_id); } */ /** * 删除未支付业务信息 * @param unPayList * @throws JDBCException */ public void deleteDoneCodeUnPay(List<FeeDto> 
feeList) throws JDBCException{ Set<Integer> doneCodeSet=new HashSet<>(); for(FeeDto fee:feeList){ doneCodeSet.add(fee.getCreate_done_code()); } for(Integer doneCode:doneCodeSet){ List<CFee> list=cFeeDao.queryUnPayByDoneCode(doneCode); if(list==null||list.size()==0){ //不存在未支付则删除未支付业务流水号 cDoneCodeUnpayDao.remove(doneCode); } } } public void updateStatus(Integer doneCode,String busiCode) throws Exception{ CDoneCode cDoneCode = new CDoneCode(); cDoneCode.setDone_code(doneCode); cDoneCode.setStatus(StatusConstants.INVALID); cDoneCodeDao.update(cDoneCode); cDoneCodeDao.saveCancel(doneCode,busiCode); } /** * 获取业务流水 * */ public Integer gDoneCode() throws Exception{ return Integer.parseInt(cDoneCodeDao.findSequence().toString()); } public CDoneCode queryByKey(Integer doneCode) throws Exception{ return cDoneCodeDao.findByKey(doneCode); } public Pager<DoneInfoDto> getUserOpenDate(Integer cDoneCode, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getUserOpenDate(cDoneCode, start, limit); } public Pager<DoneInfoDto> getOrderProdDate(Integer cDoneCode,String countyId, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getOrderProdDate(cDoneCode, countyId, start, limit); } public Pager<DoneInfoDto> getBandUpgradeDate(Integer cDoneCode,String countyId, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getBandUpgradeDate(cDoneCode, countyId, start, limit); } public Pager<DoneInfoDto> getDeviceChangeDate(Integer cDoneCode,String countyId, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getDeviceChangeDate(cDoneCode, countyId, start, limit); } public Pager<DoneInfoDto> getDeviceBuyDate(Integer cDoneCode,String countyId, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getDeviceBuyDate(cDoneCode, countyId, start, limit); } public Pager<DoneInfoDto> getPromotionDate(Integer cDoneCode,String countyId, Integer start, Integer limit) throws Exception { return 
cDoneCodeDao.getPromotionDate(cDoneCode, countyId, start, limit); } public Pager<DoneInfoDto> getPromFeeDate(Integer cDoneCode,String countyId, Integer start, Integer limit) throws Exception { return cDoneCodeDao.getPromFeeDate(cDoneCode, countyId, start, limit); } /** * 查找donecode之前可以回退且不可忽略的记录条数 * @param doneCode * @param custId * @return * @throws Exception */ public int getNeedCancleCount(Integer doneCode,String custId) throws Exception{ List<CDoneCode> list = cDoneCodeDao.queryAfterDoneCode(doneCode, custId, getOptr().getCounty_id()); return list== null?0:list.size(); } /** * 根据用户ID, 查询用户受理记录 * @param userId */ public List<CDoneCode> queryByUserId(String userId) throws Exception{ List<CDoneCode> doneCodeList = cDoneCodeDao.queryUserDoneCode(userId, getOptr().getCounty_id()); return doneCodeList; } /** * 查询用户几个月内临时授权的次数 * @param userId * @param months * @return * @throws Exception */ public int queryOpenTempTimes(String userId,int months) throws Exception{ return cDoneCodeDao.queryOpenTempTimes(userId, months,getOptr().getCounty_id()); } /** * 通过客户ID,查询客户受理记录,并关联业务的扩展信息 * @param custId */ public Pager<DoneCodeExtAttrDto> queryByCustId(String custId, QueryFeeInfo queryFeeInfo, Integer start,Integer limit)throws Exception{ Pager<DoneCodeDto> pageLstDone = cDoneCodeDao.queryCustDoneCode(custId, queryFeeInfo, getOptr().getCounty_id(),start,limit); Pager<DoneCodeExtAttrDto> pageTarget = new Pager<DoneCodeExtAttrDto>(); List<DoneCodeExtAttrDto> target = new ArrayList<DoneCodeExtAttrDto>(); pageTarget.setRecords(target); pageTarget.setStart(pageLstDone.getStart()); pageTarget.setLimit(pageLstDone.getLimit()); pageTarget.setTotalProperty(pageLstDone.getTotalProperty()); ExtAttributeDto temp = null ; DoneCodeExtAttrDto tempQ = null; List<DoneCodeDto> lstDone = pageLstDone.getRecords(); String[] doneCodeArr = CollectionHelper.converValueToArray(lstDone, "done_code"); List<DoneCodeDto> queryCfeeByDoneCode = cDoneCodeDao.queryCfeeByDoneCode(doneCodeArr,custId); Map<String, 
List<DoneCodeDto>> map2 = CollectionHelper.converToMap(queryCfeeByDoneCode, "reverse_done_code"); for (DoneCodeDto doneCodeDto : lstDone) { temp = new ExtAttributeDto( doneCodeDto ); tempQ = null; for (DoneCodeExtAttrDto q : target) { if(q.getDone_code().equals(doneCodeDto.getDone_code())){ tempQ = q; break; } } if(null == tempQ){ tempQ = new DoneCodeExtAttrDto(); //增加作废的负金额 List<DoneCodeDto> list = map2.get(doneCodeDto.getDone_code().toString()); if(CollectionHelper.isNotEmpty(list)){ Integer realPay = 0; for(DoneCodeDto d:list){ realPay += d.getReal_pay(); } doneCodeDto.setReal_pay(0-realPay); } BeanUtils.copyProperties(doneCodeDto, tempQ); target.add(tempQ); } if(StringHelper.isNotEmpty(doneCodeDto.getAttribute_id())){ StringBuffer str = new StringBuffer(); if(StringHelper.isNotEmpty(temp.getAttribute_value())){ str.append("["+temp.getAttribute_name()+"]:"+temp.getAttribute_value()+";"); if(StringHelper.isNotEmpty(tempQ.getAttr_remark())){ str.append(tempQ.getAttr_remark()); } tempQ.setAttr_remark(str.toString()); } tempQ.getExtAttrs().add(temp); } if(doneCodeDto.getBusi_code().equals(BusiCodeConstants.USER_OPEN)){ CUser user = cUserDao.findByKey(doneCodeDto.getUser_id()); if(user == null ){ user = (CUser)cUserHisDao.findByKey(doneCodeDto.getUser_id()); } String str = ""; if(user!=null){ if(str.equals(SystemConstants.USER_TYPE_BAND) && StringHelper.isNotEmpty(user.getModem_mac())){ str += user.getUser_type()+" Modem: "+user.getModem_mac(); }else if(StringHelper.isNotEmpty(user.getStb_id())){ str += user.getUser_type()+" Stb: "+user.getStb_id(); } } tempQ.setRemark(str); } } return pageTarget; } public List<CDoneCodeDetail> queryDetail(Integer doneCode) throws Exception{ return cDoneCodeDetailDao.queryDetail(doneCode); } public void saveDoneCodeInfo(Integer doneCode, String custId, String userId,Object info) throws Exception { CDoneCodeInfo doneCodeInfo = new CDoneCodeInfo(); doneCodeInfo.setDone_code(doneCode); doneCodeInfo.setArea_id(getOptr().getArea_id()); 
doneCodeInfo.setCounty_id(getOptr().getCounty_id()); doneCodeInfo.setUser_id(userId); if(info == null){ doneCodeInfo.setInfo(""); }else{ String strInfo = new Gson().toJson(info); if(strInfo.length()<3000){ doneCodeInfo.setInfo(strInfo); }else{ int mNum = (strInfo.length()%3000); int strNum = (strInfo.length()/3000); if(mNum>0){ strNum++; } if(strNum>11){ throw new Exception("所选用户数太多,无法生成业务单,请重新选择用户进行业务操作!"); } for(int i=1;i<=strNum;i++){ if(i==strNum){ if(i==1) doneCodeInfo.setInfo(strInfo); if(i==2) doneCodeInfo.setInfo1(strInfo.substring((i-1)*3000)); if(i==3) doneCodeInfo.setInfo2(strInfo.substring((i-1)*3000)); if(i==4) doneCodeInfo.setInfo3(strInfo.substring((i-1)*3000)); if(i==5) doneCodeInfo.setInfo4(strInfo.substring((i-1)*3000)); if(i==6) doneCodeInfo.setInfo5(strInfo.substring((i-1)*3000)); if(i==6) doneCodeInfo.setInfo6(strInfo.substring((i-1)*3000)); if(i==8) doneCodeInfo.setInfo7(strInfo.substring((i-1)*3000)); if(i==9) doneCodeInfo.setInfo8(strInfo.substring((i-1)*3000)); if(i==10) doneCodeInfo.setInfo9(strInfo.substring((i-1)*3000)); if(i==11) doneCodeInfo.setInfo10(strInfo.substring((i-1)*3000)); }else{ if(i==1) doneCodeInfo.setInfo(strInfo.substring(0,i*3000)); if(i==2) doneCodeInfo.setInfo1(strInfo.substring((i-1)*3000,i*3000)); if(i==3) doneCodeInfo.setInfo2(strInfo.substring((i-1)*3000,i*3000)); if(i==4) doneCodeInfo.setInfo3(strInfo.substring((i-1)*3000,i*3000)); if(i==5) doneCodeInfo.setInfo4(strInfo.substring((i-1)*3000,i*3000)); if(i==6) doneCodeInfo.setInfo5(strInfo.substring((i-1)*4000,i*4000)); if(i==7) doneCodeInfo.setInfo6(strInfo.substring((i-1)*3000,i*3000)); if(i==8) doneCodeInfo.setInfo7(strInfo.substring((i-1)*3000,i*3000)); if(i==9) doneCodeInfo.setInfo8(strInfo.substring((i-1)*3000,i*3000)); if(i==10) doneCodeInfo.setInfo9(strInfo.substring((i-1)*3000,i*3000)); if(i==11) doneCodeInfo.setInfo10(strInfo.substring((i-1)*3000,i*3000)); } } } } doneCodeInfo.setCust_id(custId); cDoneCodeInfoDao.save(doneCodeInfo); } /** * 保存业务流水 * 
@param doneCode 流水号 * @param busiCode 业务编号 * @param custId 客户编号 * @param userIds 用户编号数组 * @throws Exception */ public void saveDoneCode(Integer doneCode,String busiCode,String remark, String deptId, String countyId, String areaId,String custId, List<String> userIds) throws Exception{ if (StringHelper.isEmpty(busiCode)) throw new ComponentException("业务代码为空"); if (0 == doneCode) throw new ComponentException("业务流水为空"); //保存流水 CDoneCode cDoneCode = new CDoneCode(); cDoneCode.setDone_code(doneCode); cDoneCode.setBusi_code(busiCode); cDoneCode.setStatus(StatusConstants.ACTIVE); cDoneCode.setRemark(remark); cDoneCode.setCounty_id(countyId); cDoneCode.setArea_id(areaId); cDoneCode.setDept_id(deptId); cDoneCode.setOptr_id(getOptr().getOptr_id()); cDoneCodeDao.save(cDoneCode); //保存流水明细 if (StringHelper.isNotEmpty(custId)){ if (userIds != null && userIds.size() > 0) { CDoneCodeDetail[] detail = new CDoneCodeDetail[userIds.size()]; for( int i=0;i<userIds.size();i++){ detail[i] = new CDoneCodeDetail(); detail[i].setDone_code(doneCode); detail[i].setCust_id(custId); detail[i].setUser_id(userIds.get(i).toString()); detail[i].setArea_id(areaId); detail[i].setCounty_id(countyId); } cDoneCodeDetailDao.save(detail); } else { CDoneCodeDetail detail = new CDoneCodeDetail(); detail.setDone_code(doneCode); detail.setCust_id(custId); detail.setArea_id(areaId); detail.setCounty_id(countyId); cDoneCodeDetailDao.save(detail); } } } /** * 保存业务流水 * @param doneCode 流水号 * @param busiCode 业务编号 * @param custId 客户编号 * @param userIds 用户编号数组 * @throws Exception */ public void saveDoneCode(Integer doneCode,String busiCode,String remark ,String custId, List<String> userIds,String addr_id,String service_channel) throws Exception{ if (StringHelper.isEmpty(busiCode)) throw new ComponentException("业务代码为空"); if (0 == doneCode) throw new ComponentException("业务流水为空"); // if (StringHelper.isEmpty(custId)) // throw new ComponentException("客户id为空"); //保存流水 CDoneCode cDoneCode = new CDoneCode(); 
cDoneCode.setDone_code(doneCode); cDoneCode.setBusi_code(busiCode); cDoneCode.setStatus(StatusConstants.ACTIVE); cDoneCode.setRemark(remark); cDoneCode.setAddr_id(addr_id); cDoneCode.setService_channel(service_channel); setBaseInfo(cDoneCode); cDoneCodeDao.save(cDoneCode); //保存流水明细 if (StringHelper.isNotEmpty(custId)){ if (userIds != null && userIds.size() > 0) { CDoneCodeDetail[] detail = new CDoneCodeDetail[userIds.size()]; for( int i=0;i<userIds.size();i++){ detail[i] = new CDoneCodeDetail(); detail[i].setDone_code(doneCode); detail[i].setCust_id(custId); if(StringHelper.isNotEmpty(userIds.get(i))){//lxr临时添加 detail[i].setUser_id(userIds.get(i).toString()); } detail[i].setArea_id(getOptr().getArea_id()); detail[i].setCounty_id(getOptr().getCounty_id()); } cDoneCodeDetailDao.save(detail); } else { CDoneCodeDetail detail = new CDoneCodeDetail(); detail.setDone_code(doneCode); detail.setCust_id(custId); detail.setArea_id(getOptr().getArea_id()); detail.setCounty_id(getOptr().getCounty_id()); cDoneCodeDetailDao.save(detail); } } } public void saveBatchDoneCode(Integer doneCode,String busiCode,String remark ,List<String> custIds, List<String> userIds) throws Exception{ if (StringHelper.isEmpty(busiCode)) throw new ComponentException("业务代码为空"); if (0 == doneCode) throw new ComponentException("业务流水为空"); // if (StringHelper.isEmpty(custId)) // throw new ComponentException("客户id为空"); //保存流水 CDoneCode cDoneCode = new CDoneCode(); cDoneCode.setDone_code(doneCode); cDoneCode.setBusi_code(busiCode); cDoneCode.setStatus(StatusConstants.ACTIVE); cDoneCode.setRemark(remark); setBaseInfo(cDoneCode); cDoneCodeDao.save(cDoneCode); //保存流水明细 if (custIds != null && custIds.size() > 0){ CDoneCodeDetail[] detail = new CDoneCodeDetail[custIds.size()]; if(userIds != null && userIds.size() > 0){ for( int i=0;i<custIds.size();i++){ detail[i] = new CDoneCodeDetail(); detail[i].setDone_code(doneCode); detail[i].setCust_id(custIds.get(i)); detail[i].setUser_id(userIds.get(i)); 
detail[i].setArea_id(getOptr().getArea_id()); detail[i].setCounty_id(getOptr().getCounty_id()); } }else{ for( int i=0;i<custIds.size();i++){ detail[i] = new CDoneCodeDetail(); detail[i].setDone_code(doneCode); detail[i].setCust_id(custIds.get(i)); detail[i].setArea_id(getOptr().getArea_id()); detail[i].setCounty_id(getOptr().getCounty_id()); } } cDoneCodeDetailDao.save(detail); } } public void editRemark(int doneCode,String remark) throws Exception { cDoneCodeDao.updateRemark(doneCode, remark); } public void cancelDoneCode(Integer doneCode) throws Exception{ cDoneCodeDao.delete(doneCode); } /** * @param doneCodeInfoDao the cDoneCodeInfoDao to set */ public void setCDoneCodeInfoDao(CDoneCodeInfoDao doneCodeInfoDao) { cDoneCodeInfoDao = doneCodeInfoDao; } public void setTBusiConfirmDao(TBusiConfirmDao tBusiConfirmDao) { this.tBusiConfirmDao = tBusiConfirmDao; } public void setCDoneCodeUnpayDao(CDoneCodeUnpayDao cDoneCodeUnpayDao) { this.cDoneCodeUnpayDao = cDoneCodeUnpayDao; } }
Signed-off-by: jpdan <[email protected]>
boss-core/src/main/java/com/ycsoft/business/component/core/DoneCodeComponent.java
<ide><path>oss-core/src/main/java/com/ycsoft/business/component/core/DoneCodeComponent.java <ide> return true; <ide> } <ide> } <add> <add> public void saveDoneCodeDetail(Integer doneCode, String custId, List<String> userIdList) throws Exception { <add> CDoneCodeDetail[] detail = new CDoneCodeDetail[userIdList.size()]; <add> for( int i=0;i<userIdList.size();i++){ <add> detail[i] = new CDoneCodeDetail(); <add> detail[i].setDone_code(doneCode); <add> detail[i].setCust_id(custId); <add> detail[i].setUser_id(userIdList.get(i)); <add> detail[i].setArea_id(getOptr().getArea_id()); <add> detail[i].setCounty_id(getOptr().getCounty_id()); <add> } <add> cDoneCodeDetailDao.save(detail); <add> } <add> <add> public void saveDoneCode(Integer doneCode, String busiCode, String addrId) throws Exception { <add> CDoneCode cDoneCode = new CDoneCode(); <add> cDoneCode.setDone_code(doneCode); <add> cDoneCode.setBusi_code(busiCode); <add> cDoneCode.setStatus(StatusConstants.ACTIVE); <add> cDoneCode.setAddr_id(addrId); <add> cDoneCode.setService_channel(SystemConstants.SERVICE_CHANNEL_YYT); <add> setBaseInfo(cDoneCode); <add> cDoneCodeDao.save(cDoneCode); <add> } <add> <ide> /** <ide> * 保存未支付业务 <ide> * @param cust_id
Java
apache-2.0
e647b2076dbe4cc8c9706a215c973fea539bb9d7
0
rajdavies/fabric8,rnc/fabric8,gashcrumb/fabric8,zmhassan/fabric8,EricWittmann/fabric8,jimmidyson/fabric8,christian-posta/fabric8,hekonsek/fabric8,mwringe/fabric8,dhirajsb/fabric8,jimmidyson/fabric8,rhuss/fabric8,rhuss/fabric8,chirino/fabric8v2,christian-posta/fabric8,dhirajsb/fabric8,mwringe/fabric8,christian-posta/fabric8,KurtStam/fabric8,chirino/fabric8v2,KurtStam/fabric8,jimmidyson/fabric8,rajdavies/fabric8,EricWittmann/fabric8,rhuss/fabric8,PhilHardwick/fabric8,hekonsek/fabric8,sobkowiak/fabric8,rnc/fabric8,rajdavies/fabric8,rnc/fabric8,EricWittmann/fabric8,hekonsek/fabric8,mwringe/fabric8,rnc/fabric8,sobkowiak/fabric8,sobkowiak/fabric8,migue/fabric8,zmhassan/fabric8,rnc/fabric8,dhirajsb/fabric8,gashcrumb/fabric8,dhirajsb/fabric8,hekonsek/fabric8,gashcrumb/fabric8,migue/fabric8,mwringe/fabric8,migue/fabric8,chirino/fabric8v2,EricWittmann/fabric8,PhilHardwick/fabric8,jimmidyson/fabric8,christian-posta/fabric8,KurtStam/fabric8,hekonsek/fabric8,sobkowiak/fabric8,migue/fabric8,chirino/fabric8v2,jimmidyson/fabric8,zmhassan/fabric8,KurtStam/fabric8,gashcrumb/fabric8,zmhassan/fabric8,rhuss/fabric8,PhilHardwick/fabric8,PhilHardwick/fabric8,rajdavies/fabric8
/** * Copyright 2005-2014 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.maven; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import io.fabric8.utils.Files; import io.fabric8.utils.Objects; import io.fabric8.utils.Strings; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.execution.MavenSession; import org.apache.maven.model.Plugin; import org.apache.maven.model.PluginExecution; import org.apache.maven.model.Profile; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugin.logging.Log; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import 
org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugins.annotations.ResolutionScope; import org.apache.maven.project.MavenProject; import org.apache.maven.project.MavenProjectHelper; import org.apache.maven.shared.dependency.tree.DependencyTreeBuilderException; import org.apache.maven.shared.invoker.DefaultInvocationRequest; import org.apache.maven.shared.invoker.DefaultInvoker; import org.apache.maven.shared.invoker.InvocationRequest; import org.apache.maven.shared.invoker.InvocationResult; import org.apache.maven.shared.invoker.Invoker; import org.apache.maven.shared.invoker.MavenInvocationException; /** * Generates a ZIP file of the App for the current maven project. */ @Mojo(name = "zip", defaultPhase = LifecyclePhase.PACKAGE, requiresDependencyResolution = ResolutionScope.COMPILE) public class ZipMojo extends AbstractFabric8Mojo { private static String[] ICON_EXTENSIONS = new String[]{".svg", ".png", ".gif", ".jpg", ".jpeg"}; /** * Name of the directory used to create the app configuration zip */ @Parameter(property = "fabric8.zip.buildDir", defaultValue = "${project.build.directory}/generated-app") private File buildDir; /** * Name of the aggregated app zip file */ @Parameter(property = "fabric8.aggregated.zip.outFile", defaultValue = "${project.build.directory}/${project.artifactId}-${project.version}-app.zip") private File aggregatedZipOutputFile; @Component private MavenProjectHelper projectHelper; // this is required for the deploy phase, but end user may just use a install phase only, so let required = false @Parameter(defaultValue = "${project.distributionManagementArtifactRepository}", readonly = true, required = false) private ArtifactRepository deploymentRepository; /** * The artifact type for attaching the generated app zip file to the project */ @Parameter(property = "fabric8.zip.artifactType", defaultValue = "zip") private String artifactType = "zip"; /** * The artifact classifier for attaching the generated app zip file to 
the project */ @Parameter(property = "fabric8.zip.artifactClassifier", defaultValue = "app") private String artifactClassifier = "app"; /** * Files to be excluded */ @Parameter(property = "fabric8.excludedFiles", defaultValue = "io.fabric8.agent.properties") private String[] filesToBeExcluded; /** * The projects in the reactor. */ @Parameter(defaultValue = "${reactorProjects}") private List<MavenProject> reactorProjects; /** * Name of the directory used to create the app zip files in each reactor project when creating an aggregated zip * for all the {@link #reactorProjects} */ @Parameter(property = "fabric8.fullzip.reactorProjectOutputPath", defaultValue = "target/generated-app") private String reactorProjectOutputPath; /** * Whether or not we should upload the project readme file if no specific readme file exists in the {@link #appConfigDir} */ @Parameter(property = "fabric8.includeReadMe", defaultValue = "true") protected boolean includeReadMe; /** * If provided then any links in the readme.md files will be replaced to include the given prefix */ @Parameter(property = "fabric8.replaceReadmeLinksPrefix") protected String replaceReadmeLinksPrefix; /** * Provides the resource name of the icon to use; found using the current classpath (including the ones shippped inside the maven plugin). */ @Parameter(property = "fabric8.iconRef") protected String iconRef; /** * Whether or not we should generate a <code>Summary.md</code> file from the pom.xml &lt;description&gt; element text value. */ @Parameter(property = "fabric8.generateSummaryFile", defaultValue = "true") protected boolean generateSummaryFile; /** * Whether or not we should generate a <code>fabric8.properties</code> file from the pom.xml. */ @Parameter(property = "fabric8.generateAppPropertiesFile", defaultValue = "true") protected boolean generateAppPropertiesFile; /** * The name of the path inside the zip where the app is generated. 
*/ @Parameter(property = "fabric8.pathInZip", defaultValue = "${project.artifactId}") protected String pathInZip; /** * The maven goal used to deploy aggregated zips. Could be <code>deploy:deploy-file</code> to perform a regular deploy * or <code>gpg:sign-and-deploy-file</code> to sign and deploy the file */ @Parameter(property = "fabric8.deployFileGoal", defaultValue = "gpg:sign-and-deploy-file") protected String deployFileGoal; /** * Whether or not we should ignoreProject this maven project from this goal */ @Parameter(property = "fabric8.ignoreProject", defaultValue = "false") private boolean ignoreProject; /** * The Maven Session. * * @parameter expression="${session}" * @required * @readonly */ protected MavenSession session; @Override public void execute() throws MojoExecutionException, MojoFailureException { try { if (isIgnoreProject()) { return; } if (shouldGenerateForThisProject()) { // generate app zip (which we cannot do for a pom project) generateZip(); } boolean isLastProject = getProject() == reactorProjects.get(reactorProjects.size() - 1); getLog().debug("Is last project? " + isLastProject + " -> " + getProject().getArtifactId()); if (isLastProject) { getLog().info("Last project done. 
Now generating aggregated zips for the entire project(s)."); generateAggregatedZips(); } } catch (MojoFailureException | MojoExecutionException e) { throw e; } catch (Exception e) { throw new MojoExecutionException("Error executing", e); } } protected void generateZip() throws DependencyTreeBuilderException, MojoExecutionException, IOException, MojoFailureException { File appBuildDir = buildDir; if (Strings.isNotBlank(pathInZip)) { appBuildDir = new File(buildDir, pathInZip); } appBuildDir.mkdirs(); if (hasConfigDir()) { copyAppConfigFiles(appBuildDir, appConfigDir); } else { getLog().info("The app configuration files directory " + appConfigDir + " doesn't exist, so not copying any additional project documentation or configuration files"); } MavenProject project = getProject(); if (!ignoreProject) { File kubernetesJson = getKubernetesJson(); if (kubernetesJson != null && kubernetesJson.isFile() && kubernetesJson.exists()) { File jsonFile = new File(appBuildDir, "kubernetes.json"); jsonFile.getParentFile().mkdirs(); Files.copy(kubernetesJson, jsonFile); } // TODO if no iconRef is specified we could try guess based on the project? 
// lets check if we can use an icon reference if (Strings.isNotBlank(iconRef)) { File[] icons = appBuildDir.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (name == null) { return false; } String lower = name.toLowerCase(); if (lower.startsWith("icon.")) { for (String ext : ICON_EXTENSIONS) { if (lower.endsWith(ext)) { return true; } } } return false; } }); if (icons == null || icons.length == 0) { // lets copy the iconRef InputStream in = loadPluginResource(iconRef); if (in == null) { // maybe it dont have extension so try to find it for (String ext : ICON_EXTENSIONS) { String name = iconRef + ext; in = loadPluginResource(name); if (in != null) { iconRef = name; break; } } } if (in == null) { getLog().warn("Could not find icon: " + iconRef + " on the ClassPath!"); } else { String fileName = "icon." + Files.getFileExtension(iconRef); File outFile = new File(appBuildDir, fileName); Files.copy(in, new FileOutputStream(outFile)); getLog().info("Generated icon file " + outFile + " from icon reference: " + iconRef); } } } } // lets only generate a app zip if we have a requirement (e.g. 
we're not a parent pom packaging project) and // we have defined some configuration files or dependencies // to avoid generating dummy apps for parent poms if (hasConfigDir() || !ignoreProject) { if (includeReadMe) { copyReadMe(project.getFile().getParentFile(), appBuildDir); } if (generateSummaryFile) { String description = project.getDescription(); if (Strings.isNotBlank(description)) { File summaryMd = new File(appBuildDir, "Summary.md"); summaryMd.getParentFile().mkdirs(); if (!summaryMd.exists()) { byte[] bytes = description.getBytes(); Files.copy(new ByteArrayInputStream(bytes), new FileOutputStream(summaryMd)); } } } if (generateAppPropertiesFile) { String name = project.getName(); if (Strings.isNullOrBlank(name)) { name = project.getArtifactId(); } String description = project.getDescription(); Properties appProperties = new Properties(); appProperties.put("name", name); if (Strings.isNotBlank(description)) { appProperties.put("description", description); } appProperties.put("groupId", project.getGroupId()); appProperties.put("artifactId", project.getArtifactId()); appProperties.put("version", project.getVersion()); File appPropertiesFile = new File(appBuildDir, "fabric8.properties"); appPropertiesFile.getParentFile().mkdirs(); if (!appPropertiesFile.exists()) { appProperties.store(new FileWriter(appPropertiesFile), "Fabric8 Properties"); } } File outputZipFile = getZipFile(); Zips.createZipFile(getLog(), buildDir, outputZipFile); projectHelper.attachArtifact(project, artifactType, artifactClassifier, outputZipFile); getLog().info("Created app zip file: " + outputZipFile); } } protected void generateAggregatedZips() throws IOException, MojoExecutionException { List<MavenProject> zipGoalProjects = fabricZipGoalProjects(); // we want to walk backwards Collections.reverse(zipGoalProjects); Set<MavenProject> doneParents = new HashSet<>(); for (MavenProject zipProject : zipGoalProjects) { MavenProject parent = zipProject.getParent(); if (parent == null) { 
continue; } // are there 2 or more projects with the same parent // then we need to aggregate them to their parent (if we have not done so before) Set<MavenProject> group = sameParent(parent, zipGoalProjects); if (group.size() >= 2 && !doneParents.contains(parent)) { doneParents.add(parent); // find transitive sub groups Set<MavenProject> nested = sameParentTransitive(parent, zipGoalProjects); if (!nested.isEmpty()) { group.addAll(nested); } generateAggregatedZip(parent, reactorProjects, group); } } } private Set<MavenProject> sameParent(MavenProject parent, List<MavenProject> projects) { Set<MavenProject> answer = new LinkedHashSet<>(); for (MavenProject zip : projects) { if (Objects.equal(parent, zip.getParent())) { answer.add(zip); } } return answer; } private Set<MavenProject> sameParentTransitive(MavenProject parent, List<MavenProject> projects) { Set<MavenProject> answer = new LinkedHashSet<>(); for (MavenProject zip : projects) { if (hasAncestor(parent, zip)) { answer.add(zip); } } return answer; } private List<MavenProject> fabricZipGoalProjects() { List<MavenProject> answer = new ArrayList<>(); if (reactorProjects != null) { List<MavenProject> pomZipProjects = new ArrayList<>(); for (MavenProject reactorProject : reactorProjects) { if (isPom(reactorProject)) { pomZipProjects.add(reactorProject); } List<Plugin> buildPlugins = reactorProject.getBuildPlugins(); for (Plugin buildPlugin : buildPlugins) { String artifactId = buildPlugin.getArtifactId(); if ("fabric8-maven-plugin".equals(artifactId)) { Object goals = buildPlugin.getGoals(); boolean hasZipGoal = goals != null && goals.toString().contains("zip"); List<PluginExecution> executions = buildPlugin.getExecutions(); for (PluginExecution execution : executions) { List<String> execGoals = execution.getGoals(); if (execGoals.contains("zip")) { hasZipGoal = true; } } getLog().debug("Project " + reactorProject.getArtifactId() + " has zip goal: " + hasZipGoal); if (hasZipGoal) { answer.add(reactorProject); } } 
} } } return answer; } protected void generateAggregatedZip(MavenProject rootProject, List<MavenProject> reactorProjects, Set<MavenProject> pomZipProjects) throws IOException, MojoExecutionException { File projectBaseDir = rootProject.getBasedir(); String rootProjectGroupId = rootProject.getGroupId(); String rootProjectArtifactId = rootProject.getArtifactId(); String rootProjectVersion = rootProject.getVersion(); String aggregatedZipFileName = "target/" + rootProjectArtifactId + "-" + rootProjectVersion + "-app.zip"; File projectOutputFile = new File(projectBaseDir, aggregatedZipFileName); getLog().info("Generating " + projectOutputFile.getAbsolutePath() + " from root project " + rootProjectArtifactId); File projectBuildDir = new File(projectBaseDir, reactorProjectOutputPath); if (projectOutputFile.exists()) { projectOutputFile.delete(); } createAggregatedZip(projectBaseDir, projectBuildDir, reactorProjectOutputPath, projectOutputFile, includeReadMe, pomZipProjects); if (rootProject.getAttachedArtifacts() != null) { // need to remove existing as otherwise we get a WARN Artifact found = null; for (Artifact artifact : rootProject.getAttachedArtifacts()) { if (artifactClassifier != null && artifact.hasClassifier() && artifact.getClassifier().equals(artifactClassifier)) { found = artifact; break; } } if (found != null) { rootProject.getAttachedArtifacts().remove(found); } } getLog().info("Attaching aggregated zip " + projectOutputFile + " to root project " + rootProject.getArtifactId()); projectHelper.attachArtifact(rootProject, artifactType, artifactClassifier, projectOutputFile); // if we are doing an install goal, then also install the aggregated zip manually // as maven will install the root project first, and then build the reactor projects, and at this point // it does not help to attach artifact to root project, as those artifacts will not be installed // so we need to install manually List<String> activeProfileIds = new ArrayList<>(); List<Profile> 
activeProfiles = rootProject.getActiveProfiles(); if (activeProfiles != null) { for (Profile profile : activeProfiles) { String id = profile.getId(); if (Strings.isNotBlank(id)) { activeProfileIds.add(id); } } } if (rootProject.hasLifecyclePhase("install")) { getLog().info("Installing aggregated zip " + projectOutputFile); InvocationRequest request = new DefaultInvocationRequest(); request.setBaseDirectory(rootProject.getBasedir()); request.setPomFile(new File("./pom.xml")); request.setGoals(Collections.singletonList("install:install-file")); request.setRecursive(false); request.setInteractive(false); request.setProfiles(activeProfileIds); Properties props = new Properties(); props.setProperty("file", aggregatedZipFileName); props.setProperty("groupId", rootProjectGroupId); props.setProperty("artifactId", rootProjectArtifactId); props.setProperty("version", rootProjectVersion); props.setProperty("classifier", "app"); props.setProperty("packaging", "zip"); props.setProperty("generatePom", "false"); request.setProperties(props); getLog().info("Installing aggregated zip using: mvn install:install-file" + serializeMvnProperties(props)); Invoker invoker = new DefaultInvoker(); try { InvocationResult result = invoker.execute(request); if (result.getExitCode() != 0) { throw new IllegalStateException("Error invoking Maven goal install:install-file"); } } catch (MavenInvocationException e) { throw new MojoExecutionException("Error invoking Maven goal install:install-file", e); } } if (rootProject.hasLifecyclePhase("deploy")) { if (deploymentRepository == null) { String msg = "Cannot run deploy phase as Maven project has no <distributionManagement> with the maven url to use for deploying the aggregated zip file"; getLog().warn(msg); throw new MojoExecutionException(msg); } getLog().info("Deploying aggregated zip " + projectOutputFile + " to root project " + rootProject.getArtifactId()); getLog().info("Using deploy goal: " + deployFileGoal + " with active profiles: " + 
activeProfileIds); InvocationRequest request = new DefaultInvocationRequest(); request.setBaseDirectory(rootProject.getBasedir()); request.setPomFile(new File("./pom.xml")); request.setGoals(Collections.singletonList(deployFileGoal)); request.setRecursive(false); request.setInteractive(false); request.setProfiles(activeProfileIds); Properties props = new Properties(); props.setProperty("file", aggregatedZipFileName); props.setProperty("groupId", rootProjectGroupId); props.setProperty("artifactId", rootProjectArtifactId); props.setProperty("version", rootProjectVersion); props.setProperty("classifier", "app"); props.setProperty("packaging", "zip"); props.setProperty("url", deploymentRepository.getUrl()); props.setProperty("repositoryId", deploymentRepository.getId()); props.setProperty("generatePom", "false"); request.setProperties(props); getLog().info("Deploying aggregated zip using: mvn deploy:deploy-file" + serializeMvnProperties(props)); Invoker invoker = new DefaultInvoker(); try { InvocationResult result = invoker.execute(request); if (result.getExitCode() != 0) { throw new IllegalStateException("Error invoking Maven goal deploy:deploy-file"); } } catch (MavenInvocationException e) { throw new MojoExecutionException("Error invoking Maven goal deploy:deploy-file", e); } } } private static boolean hasAncestor(MavenProject root, MavenProject target) { if (target.getParent() == null) { return false; } if (Objects.equal(root, target.getParent())) { return true; } else { return hasAncestor(root, target.getParent()); } } private static File copyReadMe(File src, File appBuildDir) throws IOException { File[] files = src.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return name.toLowerCase(Locale.ENGLISH).startsWith("readme."); } }); if (files != null && files.length == 1) { File readme = files[0]; File outFile = new File(appBuildDir, readme.getName()); Files.copy(readme, outFile); return outFile; } return null; } private 
static String getReadMeFileKey(String relativePath) { String answer = relativePath; if (Strings.isNullOrBlank(answer)) { return "<root>"; } // remove leading path which can be either unix or windows style int pos = relativePath.indexOf('/'); int pos2 = relativePath.indexOf('\\'); if (pos > 0 && pos2 > 0) { pos = Math.max(pos, pos2); } else if (pos2 > 0) { pos = pos2; } if (pos > -1) { answer = relativePath.substring(pos); } // and remove any leading path separators answer = Files.stripLeadingSeparator(answer); if (Strings.isNullOrBlank(answer)) { answer = "<root>"; } return answer; } /** * Replacing github links with fabric apps links for our quickstarts */ protected String replaceGithubLinks(Set<String> names, String relativePath, String line) { boolean changed = false; Pattern pattern = Pattern.compile("\\[(.*?)\\]\\((.*?)\\)"); Matcher matcher = pattern.matcher(line); StringBuffer sb = new StringBuffer(); while (matcher.find()) { String s2 = matcher.group(2); if (s2.startsWith("http:") || s2.startsWith("https:")) { // leave it as-is matcher.appendReplacement(sb, "[$1]($2)"); } else { if (names.contains(s2) || names.contains(relativePath + s2) || names.contains(relativePath + "/" + s2)) { // need to ensure path is app friendly s2 = s2; if (relativePath != null && !"<root>".equals(relativePath)) { s2 = addToPath(relativePath, s2); } // its a directory matcher.appendReplacement(sb, "[$1](" + replaceReadmeLinksPrefix + s2 + ")"); } else { // need to ensure path is app friendly s2 = s2; if (relativePath != null && !"<root>".equals(relativePath)) { s2 = addToPath(relativePath, s2); } // its a app matcher.appendReplacement(sb, "[$1](" + replaceReadmeLinksPrefix + s2 + ".app)"); } changed = true; } } matcher.appendTail(sb); if (changed) { return sb.toString(); } else { return null; } } private static String addToPath(String path, String add) { if (add.startsWith("/") || path.endsWith("/")) { return path + add; } else { return path + "/" + add; } } protected void 
createAggregatedZip(File projectBaseDir, File projectBuildDir, String reactorProjectOutputPath, File projectOutputFile, boolean includeReadMe, Set<MavenProject> pomZipProjects) throws IOException { projectBuildDir.mkdirs(); for (MavenProject reactorProject : pomZipProjects) { // ignoreProject the execution root which just aggregates stuff if (!reactorProject.isExecutionRoot()) { Log log = getLog(); // TODO allow the project nesting to be defined via a property? String relativePath = getChildProjectRelativePath(projectBaseDir, reactorProject); File outDir = new File(projectBuildDir, relativePath); combineAppFilesToFolder(reactorProject, outDir, log, reactorProjectOutputPath); } } // we may want to include readme files for pom projects if (includeReadMe) { Map<String, File> pomNames = new HashMap<String, File>(); for (MavenProject pomProject : pomZipProjects) { File src = pomProject.getFile().getParentFile(); String relativePath = getChildProjectRelativePath(projectBaseDir, pomProject); File outDir = new File(projectBuildDir, relativePath); File copiedFile = copyReadMe(src, outDir); if (copiedFile != null) { String key = getReadMeFileKey(relativePath); pomNames.put(key, copiedFile); } } if (replaceReadmeLinksPrefix != null) { // now parse each readme file and replace github links for (Map.Entry<String, File> entry : pomNames.entrySet()) { File file = entry.getValue(); String key = entry.getKey(); boolean changed = false; List<String> lines = Files.readLines(file); for (int i = 0; i < lines.size(); i++) { String line = lines.get(i); String newLine = replaceGithubLinks(pomNames.keySet(), key, line); if (newLine != null) { lines.set(i, newLine); changed = true; } } if (changed) { Files.writeLines(file, lines); getLog().info("Replaced github links to fabric apps in reaadme file: " + file); } } } } Zips.createZipFile(getLog(), projectBuildDir, projectOutputFile); String relativePath = Files.getRelativePath(projectBaseDir, projectOutputFile); while 
(relativePath.startsWith("/")) { relativePath = relativePath.substring(1); } getLog().info("Created app zip file: " + relativePath); } protected static String getChildProjectRelativePath(File projectBaseDir, MavenProject pomProject) throws IOException { // must include first dir as prefix String root = projectBaseDir.getName(); String relativePath = Files.getRelativePath(projectBaseDir, pomProject.getBasedir()); relativePath = root + File.separator + relativePath; return relativePath; } /** * Combines any files from the appSourceDir into the output directory */ public static void appendAppConfigFiles(File appSourceDir, File outputDir) throws IOException { if (appSourceDir.exists() && appSourceDir.isDirectory()) { File[] files = appSourceDir.listFiles(); if (files != null) { outputDir.mkdirs(); for (File file : files) { File outFile = new File(outputDir, file.getName()); if (file.isDirectory()) { appendAppConfigFiles(file, outFile); } else { if (outFile.exists() && file.getName().endsWith(".properties")) { System.out.println("Combining properties: file " + file.getAbsolutePath()); combinePropertiesFiles(file, outFile); } else { System.out.println("Copying file " + file.getAbsolutePath()); Files.copy(file, outFile); } } } } } } protected static void combineAppFilesToFolder(MavenProject reactorProject, File buildDir, Log log, String reactorProjectOutputPath) throws IOException { File basedir = reactorProject.getBasedir(); if (!basedir.exists()) { log.warn("No basedir " + basedir.getAbsolutePath() + " for project + " + reactorProject); return; } File outDir = new File(basedir, reactorProjectOutputPath); if (!outDir.exists()) { log.warn("No app output dir at: " + outDir.getAbsolutePath() + " for project + " + reactorProject + " so ignoring this project."); return; } log.info("Copying apps from " + outDir.getAbsolutePath() + " into the output directory: " + buildDir); File[] files = outDir.listFiles(); if (files != null) { for (File file : files) { if 
(file.isDirectory()) { appendAppConfigFiles(file, buildDir); } } } } /** * For 2 properties files the source and dest file, lets combine the values so that all the values of the sourceFile are in the dest file */ protected static void combinePropertiesFiles(File sourceFile, File destFile) throws IOException { Properties source = loadProperties(sourceFile); Properties dest = loadProperties(destFile); Set<Map.Entry<Object, Object>> entries = source.entrySet(); for (Map.Entry<Object, Object> entry : entries) { Object key = entry.getKey(); Object value = entry.getValue(); if (key != null && value != null) { String keyText = key.toString(); String valueText = value.toString(); String oldValue = dest.getProperty(keyText); if (oldValue == null || oldValue.trim().length() == 0) { dest.setProperty(keyText, valueText); } else { if (oldValue.contains(valueText)) { // we've already added it so ignoreProject! } else { String newValue = oldValue + " " + valueText; dest.setProperty(keyText, newValue); } } } } dest.store(new FileWriter(destFile), "Generated by fabric8:full-zip plugin at " + new Date()); } private static Properties loadProperties(File file) throws IOException { Properties answer = new Properties(); answer.load(new FileReader(file)); return answer; } /** * Copies any local configuration files into the app directory */ protected void copyAppConfigFiles(File appBuildDir, File appConfigDir) throws IOException { File[] files = appConfigDir.listFiles(); if (files != null) { appBuildDir.mkdirs(); for (File file : files) { if (!toBeExclude(file.getName())) { File outFile = new File(appBuildDir, file.getName()); if (file.isDirectory()) { copyAppConfigFiles(outFile, file); } else { Files.copy(file, outFile); } } } } } protected boolean toBeExclude(String fileName) { List excludedFilesList = Arrays.asList(filesToBeExcluded); Boolean result = excludedFilesList.contains(fileName); return result; } protected String escapeAgentPropertiesKey(String text) { return 
text.replaceAll("\\:", "\\\\:"); } protected String escapeAgentPropertiesValue(String text) { return escapeAgentPropertiesKey(text); } private static String leadingSlash(String path) { if (path.startsWith("/")) { return path; } else { return "/" + path; } } private String serializeMvnProperties(Properties properties) { StringBuilder sb = new StringBuilder(); if (properties != null) { for (Iterator it = properties.entrySet().iterator(); it.hasNext(); ) { Map.Entry entry = (Map.Entry) it.next(); String key = (String) entry.getKey(); String value = (String) entry.getValue(); sb.append(" -D").append(key).append('=').append(value); } } return sb.toString(); } }
fabric8-maven-plugin/src/main/java/io/fabric8/maven/ZipMojo.java
/** * Copyright 2005-2014 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.maven; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileOutputStream; import java.io.FileReader; import java.io.FileWriter; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import io.fabric8.utils.Files; import io.fabric8.utils.Objects; import io.fabric8.utils.Strings; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.execution.MavenSession; import org.apache.maven.model.Plugin; import org.apache.maven.model.PluginExecution; import org.apache.maven.model.Profile; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugin.logging.Log; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.LifecyclePhase; import org.apache.maven.plugins.annotations.Mojo; import 
org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.plugins.annotations.ResolutionScope; import org.apache.maven.project.MavenProject; import org.apache.maven.project.MavenProjectHelper; import org.apache.maven.shared.dependency.tree.DependencyTreeBuilderException; import org.apache.maven.shared.invoker.DefaultInvocationRequest; import org.apache.maven.shared.invoker.DefaultInvoker; import org.apache.maven.shared.invoker.InvocationRequest; import org.apache.maven.shared.invoker.InvocationResult; import org.apache.maven.shared.invoker.Invoker; import org.apache.maven.shared.invoker.MavenInvocationException; /** * Generates a ZIP file of the App for the current maven project. */ @Mojo(name = "zip", defaultPhase = LifecyclePhase.PACKAGE, requiresDependencyResolution = ResolutionScope.COMPILE) public class ZipMojo extends AbstractFabric8Mojo { private static String[] ICON_EXTENSIONS = new String[]{".svg", ".png", ".gif", ".jpg", ".jpeg"}; /** * Name of the directory used to create the app configuration zip */ @Parameter(property = "fabric8.zip.buildDir", defaultValue = "${project.build.directory}/generated-app") private File buildDir; /** * Name of the aggregated app zip file */ @Parameter(property = "fabric8.aggregated.zip.outFile", defaultValue = "${project.build.directory}/${project.artifactId}-${project.version}-app.zip") private File aggregatedZipOutputFile; @Component private MavenProjectHelper projectHelper; // this is required for the deploy phase, but end user may just use a install phase only, so let required = false @Parameter(defaultValue = "${project.distributionManagementArtifactRepository}", readonly = true, required = false) private ArtifactRepository deploymentRepository; /** * The artifact type for attaching the generated app zip file to the project */ @Parameter(property = "fabric8.zip.artifactType", defaultValue = "zip") private String artifactType = "zip"; /** * The artifact classifier for attaching the generated app zip file to 
the project */ @Parameter(property = "fabric8.zip.artifactClassifier", defaultValue = "app") private String artifactClassifier = "app"; /** * Files to be excluded */ @Parameter(property = "fabric8.excludedFiles", defaultValue = "io.fabric8.agent.properties") private String[] filesToBeExcluded; /** * The projects in the reactor. */ @Parameter(defaultValue = "${reactorProjects}") private List<MavenProject> reactorProjects; /** * Name of the directory used to create the app zip files in each reactor project when creating an aggregated zip * for all the {@link #reactorProjects} */ @Parameter(property = "fabric8.fullzip.reactorProjectOutputPath", defaultValue = "target/generated-app") private String reactorProjectOutputPath; /** * Whether or not we should upload the project readme file if no specific readme file exists in the {@link #appConfigDir} */ @Parameter(property = "fabric8.includeReadMe", defaultValue = "true") protected boolean includeReadMe; /** * If provided then any links in the readme.md files will be replaced to include the given prefix */ @Parameter(property = "fabric8.replaceReadmeLinksPrefix") protected String replaceReadmeLinksPrefix; /** * Provides the resource name of the icon to use; found using the current classpath (including the ones shippped inside the maven plugin). */ @Parameter(property = "fabric8.iconRef") protected String iconRef; /** * Whether or not we should generate a <code>Summary.md</code> file from the pom.xml &lt;description&gt; element text value. */ @Parameter(property = "fabric8.generateSummaryFile", defaultValue = "true") protected boolean generateSummaryFile; /** * Whether or not we should generate a <code>fabric8.properties</code> file from the pom.xml. */ @Parameter(property = "fabric8.generateAppPropertiesFile", defaultValue = "true") protected boolean generateAppPropertiesFile; /** * The name of the path inside the zip where the app is generated. 
*/ @Parameter(property = "fabric8.pathInZip", defaultValue = "${project.artifactId}") protected String pathInZip; /** * The maven goal used to deploy aggregated zips. Could be <code>deploy:deploy-file</code> to perform a regular deploy * or <code>gpg:sign-and-deploy-file</code> to sign and deploy the file */ @Parameter(property = "fabric8.deployFileGoal", defaultValue = "gpg:sign-and-deploy-file") protected String deployFileGoal; /** * Whether or not we should ignoreProject this maven project from this goal */ @Parameter(property = "fabric8.ignoreProject", defaultValue = "false") private boolean ignoreProject; /** * The Maven Session. * * @parameter expression="${session}" * @required * @readonly */ protected MavenSession session; @Override public void execute() throws MojoExecutionException, MojoFailureException { try { if (isIgnoreProject()) { return; } if (shouldGenerateForThisProject()) { // generate app zip (which we cannot do for a pom project) generateZip(); } boolean isLastProject = getProject() == reactorProjects.get(reactorProjects.size() - 1); getLog().debug("Is last project? " + isLastProject + " -> " + getProject().getArtifactId()); if (isLastProject) { getLog().info("Last project done. 
Now generating aggregated zips for the entire project(s)."); generateAggregatedZips(); } } catch (MojoFailureException | MojoExecutionException e) { throw e; } catch (Exception e) { throw new MojoExecutionException("Error executing", e); } } protected void generateZip() throws DependencyTreeBuilderException, MojoExecutionException, IOException, MojoFailureException { File appBuildDir = buildDir; if (Strings.isNotBlank(pathInZip)) { appBuildDir = new File(buildDir, pathInZip); } appBuildDir.mkdirs(); if (hasConfigDir()) { copyAppConfigFiles(appBuildDir, appConfigDir); } else { getLog().info("The app configuration files directory " + appConfigDir + " doesn't exist, so not copying any additional project documentation or configuration files"); } MavenProject project = getProject(); if (!ignoreProject) { File kubernetesJson = getKubernetesJson(); if (kubernetesJson != null && kubernetesJson.isFile() && kubernetesJson.exists()) { File jsonFile = new File(appBuildDir, "kubernetes.json"); jsonFile.getParentFile().mkdirs(); Files.copy(kubernetesJson, jsonFile); } // TODO if no iconRef is specified we could try guess based on the project? // lets check if we can use an icon reference if (Strings.isNotBlank(iconRef)) { File[] icons = appBuildDir.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { if (name == null) { return false; } String lower = name.toLowerCase(); if (lower.startsWith("icon.")) { for (String ext : ICON_EXTENSIONS) { if (lower.endsWith(ext)) { return true; } } } return false; } }); if (icons == null || icons.length == 0) { // lets copy the iconRef InputStream in = loadPluginResource(iconRef); // maybe it dont have extension so try to find it for (String ext : ICON_EXTENSIONS) { String name = iconRef + ext; in = loadPluginResource(name); if (in != null) { iconRef = name; break; } } if (in == null) { getLog().warn("Could not find icon: " + iconRef + " on the ClassPath!"); } else { String fileName = "icon." 
+ Files.getFileExtension(iconRef); File outFile = new File(appBuildDir, fileName); Files.copy(in, new FileOutputStream(outFile)); getLog().info("Generated icon file " + outFile + " from icon reference: " + iconRef); } } } } // lets only generate a app zip if we have a requirement (e.g. we're not a parent pom packaging project) and // we have defined some configuration files or dependencies // to avoid generating dummy apps for parent poms if (hasConfigDir() || !ignoreProject) { if (includeReadMe) { copyReadMe(project.getFile().getParentFile(), appBuildDir); } if (generateSummaryFile) { String description = project.getDescription(); if (Strings.isNotBlank(description)) { File summaryMd = new File(appBuildDir, "Summary.md"); summaryMd.getParentFile().mkdirs(); if (!summaryMd.exists()) { byte[] bytes = description.getBytes(); Files.copy(new ByteArrayInputStream(bytes), new FileOutputStream(summaryMd)); } } } if (generateAppPropertiesFile) { String name = project.getName(); if (Strings.isNullOrBlank(name)) { name = project.getArtifactId(); } String description = project.getDescription(); Properties appProperties = new Properties(); appProperties.put("name", name); if (Strings.isNotBlank(description)) { appProperties.put("description", description); } appProperties.put("groupId", project.getGroupId()); appProperties.put("artifactId", project.getArtifactId()); appProperties.put("version", project.getVersion()); File appPropertiesFile = new File(appBuildDir, "fabric8.properties"); appPropertiesFile.getParentFile().mkdirs(); if (!appPropertiesFile.exists()) { appProperties.store(new FileWriter(appPropertiesFile), "Fabric8 Properties"); } } File outputZipFile = getZipFile(); Zips.createZipFile(getLog(), buildDir, outputZipFile); projectHelper.attachArtifact(project, artifactType, artifactClassifier, outputZipFile); getLog().info("Created app zip file: " + outputZipFile); } } protected void generateAggregatedZips() throws IOException, MojoExecutionException { 
List<MavenProject> zipGoalProjects = fabricZipGoalProjects(); // we want to walk backwards Collections.reverse(zipGoalProjects); Set<MavenProject> doneParents = new HashSet<>(); for (MavenProject zipProject : zipGoalProjects) { MavenProject parent = zipProject.getParent(); if (parent == null) { continue; } // are there 2 or more projects with the same parent // then we need to aggregate them to their parent (if we have not done so before) Set<MavenProject> group = sameParent(parent, zipGoalProjects); if (group.size() >= 2 && !doneParents.contains(parent)) { doneParents.add(parent); // find transitive sub groups Set<MavenProject> nested = sameParentTransitive(parent, zipGoalProjects); if (!nested.isEmpty()) { group.addAll(nested); } generateAggregatedZip(parent, reactorProjects, group); } } } private Set<MavenProject> sameParent(MavenProject parent, List<MavenProject> projects) { Set<MavenProject> answer = new LinkedHashSet<>(); for (MavenProject zip : projects) { if (Objects.equal(parent, zip.getParent())) { answer.add(zip); } } return answer; } private Set<MavenProject> sameParentTransitive(MavenProject parent, List<MavenProject> projects) { Set<MavenProject> answer = new LinkedHashSet<>(); for (MavenProject zip : projects) { if (hasAncestor(parent, zip)) { answer.add(zip); } } return answer; } private List<MavenProject> fabricZipGoalProjects() { List<MavenProject> answer = new ArrayList<>(); if (reactorProjects != null) { List<MavenProject> pomZipProjects = new ArrayList<>(); for (MavenProject reactorProject : reactorProjects) { if (isPom(reactorProject)) { pomZipProjects.add(reactorProject); } List<Plugin> buildPlugins = reactorProject.getBuildPlugins(); for (Plugin buildPlugin : buildPlugins) { String artifactId = buildPlugin.getArtifactId(); if ("fabric8-maven-plugin".equals(artifactId)) { Object goals = buildPlugin.getGoals(); boolean hasZipGoal = goals != null && goals.toString().contains("zip"); List<PluginExecution> executions = 
buildPlugin.getExecutions(); for (PluginExecution execution : executions) { List<String> execGoals = execution.getGoals(); if (execGoals.contains("zip")) { hasZipGoal = true; } } getLog().debug("Project " + reactorProject.getArtifactId() + " has zip goal: " + hasZipGoal); if (hasZipGoal) { answer.add(reactorProject); } } } } } return answer; } protected void generateAggregatedZip(MavenProject rootProject, List<MavenProject> reactorProjects, Set<MavenProject> pomZipProjects) throws IOException, MojoExecutionException { File projectBaseDir = rootProject.getBasedir(); String rootProjectGroupId = rootProject.getGroupId(); String rootProjectArtifactId = rootProject.getArtifactId(); String rootProjectVersion = rootProject.getVersion(); String aggregatedZipFileName = "target/" + rootProjectArtifactId + "-" + rootProjectVersion + "-app.zip"; File projectOutputFile = new File(projectBaseDir, aggregatedZipFileName); getLog().info("Generating " + projectOutputFile.getAbsolutePath() + " from root project " + rootProjectArtifactId); File projectBuildDir = new File(projectBaseDir, reactorProjectOutputPath); if (projectOutputFile.exists()) { projectOutputFile.delete(); } createAggregatedZip(projectBaseDir, projectBuildDir, reactorProjectOutputPath, projectOutputFile, includeReadMe, pomZipProjects); if (rootProject.getAttachedArtifacts() != null) { // need to remove existing as otherwise we get a WARN Artifact found = null; for (Artifact artifact : rootProject.getAttachedArtifacts()) { if (artifactClassifier != null && artifact.hasClassifier() && artifact.getClassifier().equals(artifactClassifier)) { found = artifact; break; } } if (found != null) { rootProject.getAttachedArtifacts().remove(found); } } getLog().info("Attaching aggregated zip " + projectOutputFile + " to root project " + rootProject.getArtifactId()); projectHelper.attachArtifact(rootProject, artifactType, artifactClassifier, projectOutputFile); // if we are doing an install goal, then also install the aggregated 
zip manually // as maven will install the root project first, and then build the reactor projects, and at this point // it does not help to attach artifact to root project, as those artifacts will not be installed // so we need to install manually List<String> activeProfileIds = new ArrayList<>(); List<Profile> activeProfiles = rootProject.getActiveProfiles(); if (activeProfiles != null) { for (Profile profile : activeProfiles) { String id = profile.getId(); if (Strings.isNotBlank(id)) { activeProfileIds.add(id); } } } if (rootProject.hasLifecyclePhase("install")) { getLog().info("Installing aggregated zip " + projectOutputFile); InvocationRequest request = new DefaultInvocationRequest(); request.setBaseDirectory(rootProject.getBasedir()); request.setPomFile(new File("./pom.xml")); request.setGoals(Collections.singletonList("install:install-file")); request.setRecursive(false); request.setInteractive(false); request.setProfiles(activeProfileIds); Properties props = new Properties(); props.setProperty("file", aggregatedZipFileName); props.setProperty("groupId", rootProjectGroupId); props.setProperty("artifactId", rootProjectArtifactId); props.setProperty("version", rootProjectVersion); props.setProperty("classifier", "app"); props.setProperty("packaging", "zip"); props.setProperty("generatePom", "false"); request.setProperties(props); getLog().info("Installing aggregated zip using: mvn install:install-file" + serializeMvnProperties(props)); Invoker invoker = new DefaultInvoker(); try { InvocationResult result = invoker.execute(request); if (result.getExitCode() != 0) { throw new IllegalStateException("Error invoking Maven goal install:install-file"); } } catch (MavenInvocationException e) { throw new MojoExecutionException("Error invoking Maven goal install:install-file", e); } } if (rootProject.hasLifecyclePhase("deploy")) { if (deploymentRepository == null) { String msg = "Cannot run deploy phase as Maven project has no <distributionManagement> with the maven url 
to use for deploying the aggregated zip file"; getLog().warn(msg); throw new MojoExecutionException(msg); } getLog().info("Deploying aggregated zip " + projectOutputFile + " to root project " + rootProject.getArtifactId()); getLog().info("Using deploy goal: " + deployFileGoal + " with active profiles: " + activeProfileIds); InvocationRequest request = new DefaultInvocationRequest(); request.setBaseDirectory(rootProject.getBasedir()); request.setPomFile(new File("./pom.xml")); request.setGoals(Collections.singletonList(deployFileGoal)); request.setRecursive(false); request.setInteractive(false); request.setProfiles(activeProfileIds); Properties props = new Properties(); props.setProperty("file", aggregatedZipFileName); props.setProperty("groupId", rootProjectGroupId); props.setProperty("artifactId", rootProjectArtifactId); props.setProperty("version", rootProjectVersion); props.setProperty("classifier", "app"); props.setProperty("packaging", "zip"); props.setProperty("url", deploymentRepository.getUrl()); props.setProperty("repositoryId", deploymentRepository.getId()); props.setProperty("generatePom", "false"); request.setProperties(props); getLog().info("Deploying aggregated zip using: mvn deploy:deploy-file" + serializeMvnProperties(props)); Invoker invoker = new DefaultInvoker(); try { InvocationResult result = invoker.execute(request); if (result.getExitCode() != 0) { throw new IllegalStateException("Error invoking Maven goal deploy:deploy-file"); } } catch (MavenInvocationException e) { throw new MojoExecutionException("Error invoking Maven goal deploy:deploy-file", e); } } } private static boolean hasAncestor(MavenProject root, MavenProject target) { if (target.getParent() == null) { return false; } if (Objects.equal(root, target.getParent())) { return true; } else { return hasAncestor(root, target.getParent()); } } private static File copyReadMe(File src, File appBuildDir) throws IOException { File[] files = src.listFiles(new FilenameFilter() { @Override 
public boolean accept(File dir, String name) { return name.toLowerCase(Locale.ENGLISH).startsWith("readme."); } }); if (files != null && files.length == 1) { File readme = files[0]; File outFile = new File(appBuildDir, readme.getName()); Files.copy(readme, outFile); return outFile; } return null; } private static String getReadMeFileKey(String relativePath) { String answer = relativePath; if (Strings.isNullOrBlank(answer)) { return "<root>"; } // remove leading path which can be either unix or windows style int pos = relativePath.indexOf('/'); int pos2 = relativePath.indexOf('\\'); if (pos > 0 && pos2 > 0) { pos = Math.max(pos, pos2); } else if (pos2 > 0) { pos = pos2; } if (pos > -1) { answer = relativePath.substring(pos); } // and remove any leading path separators answer = Files.stripLeadingSeparator(answer); if (Strings.isNullOrBlank(answer)) { answer = "<root>"; } return answer; } /** * Replacing github links with fabric apps links for our quickstarts */ protected String replaceGithubLinks(Set<String> names, String relativePath, String line) { boolean changed = false; Pattern pattern = Pattern.compile("\\[(.*?)\\]\\((.*?)\\)"); Matcher matcher = pattern.matcher(line); StringBuffer sb = new StringBuffer(); while (matcher.find()) { String s2 = matcher.group(2); if (s2.startsWith("http:") || s2.startsWith("https:")) { // leave it as-is matcher.appendReplacement(sb, "[$1]($2)"); } else { if (names.contains(s2) || names.contains(relativePath + s2) || names.contains(relativePath + "/" + s2)) { // need to ensure path is app friendly s2 = s2; if (relativePath != null && !"<root>".equals(relativePath)) { s2 = addToPath(relativePath, s2); } // its a directory matcher.appendReplacement(sb, "[$1](" + replaceReadmeLinksPrefix + s2 + ")"); } else { // need to ensure path is app friendly s2 = s2; if (relativePath != null && !"<root>".equals(relativePath)) { s2 = addToPath(relativePath, s2); } // its a app matcher.appendReplacement(sb, "[$1](" + replaceReadmeLinksPrefix + s2 
+ ".app)"); } changed = true; } } matcher.appendTail(sb); if (changed) { return sb.toString(); } else { return null; } } private static String addToPath(String path, String add) { if (add.startsWith("/") || path.endsWith("/")) { return path + add; } else { return path + "/" + add; } } protected void createAggregatedZip(File projectBaseDir, File projectBuildDir, String reactorProjectOutputPath, File projectOutputFile, boolean includeReadMe, Set<MavenProject> pomZipProjects) throws IOException { projectBuildDir.mkdirs(); for (MavenProject reactorProject : pomZipProjects) { // ignoreProject the execution root which just aggregates stuff if (!reactorProject.isExecutionRoot()) { Log log = getLog(); // TODO allow the project nesting to be defined via a property? String relativePath = getChildProjectRelativePath(projectBaseDir, reactorProject); File outDir = new File(projectBuildDir, relativePath); combineAppFilesToFolder(reactorProject, outDir, log, reactorProjectOutputPath); } } // we may want to include readme files for pom projects if (includeReadMe) { Map<String, File> pomNames = new HashMap<String, File>(); for (MavenProject pomProject : pomZipProjects) { File src = pomProject.getFile().getParentFile(); String relativePath = getChildProjectRelativePath(projectBaseDir, pomProject); File outDir = new File(projectBuildDir, relativePath); File copiedFile = copyReadMe(src, outDir); if (copiedFile != null) { String key = getReadMeFileKey(relativePath); pomNames.put(key, copiedFile); } } if (replaceReadmeLinksPrefix != null) { // now parse each readme file and replace github links for (Map.Entry<String, File> entry : pomNames.entrySet()) { File file = entry.getValue(); String key = entry.getKey(); boolean changed = false; List<String> lines = Files.readLines(file); for (int i = 0; i < lines.size(); i++) { String line = lines.get(i); String newLine = replaceGithubLinks(pomNames.keySet(), key, line); if (newLine != null) { lines.set(i, newLine); changed = true; } } if 
(changed) { Files.writeLines(file, lines); getLog().info("Replaced github links to fabric apps in reaadme file: " + file); } } } } Zips.createZipFile(getLog(), projectBuildDir, projectOutputFile); String relativePath = Files.getRelativePath(projectBaseDir, projectOutputFile); while (relativePath.startsWith("/")) { relativePath = relativePath.substring(1); } getLog().info("Created app zip file: " + relativePath); } protected static String getChildProjectRelativePath(File projectBaseDir, MavenProject pomProject) throws IOException { // must include first dir as prefix String root = projectBaseDir.getName(); String relativePath = Files.getRelativePath(projectBaseDir, pomProject.getBasedir()); relativePath = root + File.separator + relativePath; return relativePath; } /** * Combines any files from the appSourceDir into the output directory */ public static void appendAppConfigFiles(File appSourceDir, File outputDir) throws IOException { if (appSourceDir.exists() && appSourceDir.isDirectory()) { File[] files = appSourceDir.listFiles(); if (files != null) { outputDir.mkdirs(); for (File file : files) { File outFile = new File(outputDir, file.getName()); if (file.isDirectory()) { appendAppConfigFiles(file, outFile); } else { if (outFile.exists() && file.getName().endsWith(".properties")) { System.out.println("Combining properties: file " + file.getAbsolutePath()); combinePropertiesFiles(file, outFile); } else { System.out.println("Copying file " + file.getAbsolutePath()); Files.copy(file, outFile); } } } } } } protected static void combineAppFilesToFolder(MavenProject reactorProject, File buildDir, Log log, String reactorProjectOutputPath) throws IOException { File basedir = reactorProject.getBasedir(); if (!basedir.exists()) { log.warn("No basedir " + basedir.getAbsolutePath() + " for project + " + reactorProject); return; } File outDir = new File(basedir, reactorProjectOutputPath); if (!outDir.exists()) { log.warn("No app output dir at: " + outDir.getAbsolutePath() + " 
for project + " + reactorProject + " so ignoring this project."); return; } log.info("Copying apps from " + outDir.getAbsolutePath() + " into the output directory: " + buildDir); File[] files = outDir.listFiles(); if (files != null) { for (File file : files) { if (file.isDirectory()) { appendAppConfigFiles(file, buildDir); } } } } /** * For 2 properties files the source and dest file, lets combine the values so that all the values of the sourceFile are in the dest file */ protected static void combinePropertiesFiles(File sourceFile, File destFile) throws IOException { Properties source = loadProperties(sourceFile); Properties dest = loadProperties(destFile); Set<Map.Entry<Object, Object>> entries = source.entrySet(); for (Map.Entry<Object, Object> entry : entries) { Object key = entry.getKey(); Object value = entry.getValue(); if (key != null && value != null) { String keyText = key.toString(); String valueText = value.toString(); String oldValue = dest.getProperty(keyText); if (oldValue == null || oldValue.trim().length() == 0) { dest.setProperty(keyText, valueText); } else { if (oldValue.contains(valueText)) { // we've already added it so ignoreProject! 
} else { String newValue = oldValue + " " + valueText; dest.setProperty(keyText, newValue); } } } } dest.store(new FileWriter(destFile), "Generated by fabric8:full-zip plugin at " + new Date()); } private static Properties loadProperties(File file) throws IOException { Properties answer = new Properties(); answer.load(new FileReader(file)); return answer; } /** * Copies any local configuration files into the app directory */ protected void copyAppConfigFiles(File appBuildDir, File appConfigDir) throws IOException { File[] files = appConfigDir.listFiles(); if (files != null) { appBuildDir.mkdirs(); for (File file : files) { if (!toBeExclude(file.getName())) { File outFile = new File(appBuildDir, file.getName()); if (file.isDirectory()) { copyAppConfigFiles(outFile, file); } else { Files.copy(file, outFile); } } } } } protected boolean toBeExclude(String fileName) { List excludedFilesList = Arrays.asList(filesToBeExcluded); Boolean result = excludedFilesList.contains(fileName); return result; } protected String escapeAgentPropertiesKey(String text) { return text.replaceAll("\\:", "\\\\:"); } protected String escapeAgentPropertiesValue(String text) { return escapeAgentPropertiesKey(text); } private static String leadingSlash(String path) { if (path.startsWith("/")) { return path; } else { return "/" + path; } } private String serializeMvnProperties(Properties properties) { StringBuilder sb = new StringBuilder(); if (properties != null) { for (Iterator it = properties.entrySet().iterator(); it.hasNext(); ) { Map.Entry entry = (Map.Entry) it.next(); String key = (String) entry.getKey(); String value = (String) entry.getValue(); sb.append(" -D").append(key).append('=').append(value); } } return sb.toString(); } }
fixes #3713
fabric8-maven-plugin/src/main/java/io/fabric8/maven/ZipMojo.java
fixes #3713
<ide><path>abric8-maven-plugin/src/main/java/io/fabric8/maven/ZipMojo.java <ide> if (icons == null || icons.length == 0) { <ide> // lets copy the iconRef <ide> InputStream in = loadPluginResource(iconRef); <del> // maybe it dont have extension so try to find it <del> for (String ext : ICON_EXTENSIONS) { <del> String name = iconRef + ext; <del> in = loadPluginResource(name); <del> if (in != null) { <del> iconRef = name; <del> break; <add> if (in == null) { <add> // maybe it dont have extension so try to find it <add> for (String ext : ICON_EXTENSIONS) { <add> String name = iconRef + ext; <add> in = loadPluginResource(name); <add> if (in != null) { <add> iconRef = name; <add> break; <add> } <ide> } <ide> } <ide> if (in == null) {
Java
apache-2.0
748fb4415736bbd86a3951477ed4b08b7beadcca
0
lpandzic/querydsl,robertandrewbain/querydsl,robertandrewbain/querydsl,pkcool/querydsl,dharaburda/querydsl,attila-kiss-it/querydsl,mdiazf/querydsl,balazs-zsoldos/querydsl,querydsl/querydsl,lpandzic/querydsl,kevinleturc/querydsl,tomforster/querydsl,johnktims/querydsl,robertandrewbain/querydsl,izeye/querydsl,tomforster/querydsl,gordski/querydsl,mdiazf/querydsl,querydsl/querydsl,attila-kiss-it/querydsl,lpandzic/querydsl,gordski/querydsl,vveloso/querydsl,dharaburda/querydsl,balazs-zsoldos/querydsl,izeye/querydsl,pkcool/querydsl,mosoft521/querydsl,mosoft521/querydsl,Log10Solutions/querydsl,mdiazf/querydsl,Log10Solutions/querydsl,kevinleturc/querydsl,dharaburda/querydsl,johnktims/querydsl,vveloso/querydsl,balazs-zsoldos/querydsl,attila-kiss-it/querydsl,Log10Solutions/querydsl,gordski/querydsl,johnktims/querydsl,querydsl/querydsl,querydsl/querydsl,mosoft521/querydsl,kevinleturc/querydsl,lpandzic/querydsl,tomforster/querydsl,izeye/querydsl,pkcool/querydsl,vveloso/querydsl
/* * Copyright (c) 2010 Mysema Ltd. * All rights reserved. * */ package com.mysema.query.types; import org.junit.Test; import com.mysema.query.types.expr.ENumber; import com.mysema.query.types.expr.ENumberConst; import com.mysema.query.types.expr.EString; import com.mysema.query.types.expr.EStringConst; public class EConstructorTest { public static class Projection{ public Projection(){ } public Projection(Long id){ } public Projection(long id, String text){ } public Projection(CharSequence text){ } } @Test public void test_Constructor(){ ENumber<Long> longVal = ENumberConst.create(1l); EString stringVal = EStringConst.create(""); new EConstructor<Projection>(Projection.class, new Class[]{long.class, String.class}, longVal, stringVal).newInstance(0l,""); } @Test public void test_create(){ ENumber<Long> longVal = ENumberConst.create(1l); EString stringVal = EStringConst.create(""); EConstructor.create(Projection.class, longVal, stringVal).newInstance(0l,""); } @Test public void test_create2(){ ENumber<Long> longVal = ENumberConst.create(1l); EConstructor.create(Projection.class, longVal).newInstance(0l); } @Test public void test_create3(){ EConstructor.create(Projection.class).newInstance(); } @Test public void test_create4(){ EString stringVal = EStringConst.create(""); EConstructor.create(Projection.class, stringVal).newInstance(""); } }
querydsl-core/src/test/java/com/mysema/query/types/EConstructorTest.java
/* * Copyright (c) 2010 Mysema Ltd. * All rights reserved. * */ package com.mysema.query.types; import static org.junit.Assert.assertEquals; import java.lang.reflect.Constructor; import org.junit.Test; import com.mysema.query.types.expr.ENumber; import com.mysema.query.types.expr.ENumberConst; import com.mysema.query.types.expr.EString; import com.mysema.query.types.expr.EStringConst; public class EConstructorTest { public static class Projection{ public Projection(){ } public Projection(Long id){ } public Projection(long id, String text){ } public Projection(CharSequence text){ } } @Test public void test_Constructor(){ ENumber<Long> longVal = ENumberConst.create(1l); EString stringVal = EStringConst.create(""); new EConstructor<Projection>(Projection.class, new Class[]{long.class, String.class}, longVal, stringVal).newInstance(0l,""); } @Test public void test_create(){ ENumber<Long> longVal = ENumberConst.create(1l); EString stringVal = EStringConst.create(""); EConstructor.create(Projection.class, longVal, stringVal).newInstance(0l,""); } @Test public void test_create2(){ ENumber<Long> longVal = ENumberConst.create(1l); EConstructor.create(Projection.class, longVal).newInstance(0l); } @Test public void test_create3(){ EConstructor.create(Projection.class).newInstance(); } @Test public void test_create4(){ EString stringVal = EStringConst.create(""); EConstructor.create(Projection.class, stringVal).newInstance(""); } }
organized imports
querydsl-core/src/test/java/com/mysema/query/types/EConstructorTest.java
organized imports
<ide><path>uerydsl-core/src/test/java/com/mysema/query/types/EConstructorTest.java <ide> * <ide> */ <ide> package com.mysema.query.types; <del> <del>import static org.junit.Assert.assertEquals; <del> <del>import java.lang.reflect.Constructor; <ide> <ide> import org.junit.Test; <ide>
Java
apache-2.0
fc44aeb4449158bee24f3262b5e2c16a941b8133
0
ChinaQuants/OG-Platform,jeorme/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,nssales/OG-Platform,codeaudit/OG-Platform,jerome79/OG-Platform,ChinaQuants/OG-Platform,nssales/OG-Platform,jerome79/OG-Platform,jeorme/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,McLeodMoores/starling,DevStreet/FinanceAnalytics,ChinaQuants/OG-Platform,McLeodMoores/starling,McLeodMoores/starling,jerome79/OG-Platform,McLeodMoores/starling,DevStreet/FinanceAnalytics,nssales/OG-Platform,jeorme/OG-Platform,jeorme/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,codeaudit/OG-Platform,ChinaQuants/OG-Platform
/** * Copyright (C) 2009 - 2010 by OpenGamma Inc. * * Please see distribution for license. */ package com.opengamma.financial.web.position; import java.net.URI; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import org.joda.beans.impl.flexi.FlexiBean; import com.opengamma.financial.position.master.PortfolioTreeDocument; import com.opengamma.financial.position.master.PositionDocument; import com.opengamma.id.UniqueIdentifier; /** * RESTful resource for a version of a position. */ @Path("/portfolios/{portfolioId}/nodes/{nodeId}/positions/{positionId}/versions/{versionId}") @Produces(MediaType.TEXT_HTML) public class WebPortfolioNodePositionVersionResource extends AbstractWebPortfolioResource { /** * Creates the resource. * @param parent the parent resource, not null */ public WebPortfolioNodePositionVersionResource(final AbstractWebPortfolioResource parent) { super(parent); } //------------------------------------------------------------------------- @GET public String get() { FlexiBean out = createRootData(); return getFreemarker().build("portfolios/portfolionodepositionversion.ftl", out); } //------------------------------------------------------------------------- /** * Creates the output root data. 
* @return the output root data, not null */ public FlexiBean createRootData() { PortfolioTreeDocument treeDoc = data().getPortfolio(); PositionDocument latestPositionDoc = data().getPosition(); PositionDocument versionedPosition = (PositionDocument) data().getVersioned(); FlexiBean out = getFreemarker().createRootData(); out.put("portfolioDoc", treeDoc); out.put("portfolio", treeDoc.getPortfolio()); out.put("node", data().getNode()); out.put("latestPositionDoc", latestPositionDoc); out.put("latestPosition", latestPositionDoc.getPosition()); out.put("positionDoc", versionedPosition); out.put("position", versionedPosition.getPosition()); out.put("uris", new WebPortfoliosUris(data())); return out; } //------------------------------------------------------------------------- /** * Builds a URI for this resource. * @param data the data, not null * @return the URI, not null */ public static URI uri(final WebPortfoliosData data) { return uri(data, null); } /** * Builds a URI for this resource. * @param data the data, not null * @param overrideVersionId the override version id, null uses information from data * @return the URI, not null */ public static URI uri(final WebPortfoliosData data, final UniqueIdentifier overrideVersionId) { String portfolioId = data.getBestPortfolioUriId(null); String nodeId = data.getBestNodeUriId(null); String positionId = data.getBestPositionUriId(null); String versionId = (overrideVersionId != null ? overrideVersionId.getVersion() : data.getUriVersionId()); return data.getUriInfo().getBaseUriBuilder().path(WebPortfolioNodePositionVersionResource.class).build(portfolioId, nodeId, positionId, versionId); } }
src/com/opengamma/financial/web/position/WebPortfolioNodePositionVersionResource.java
/** * Copyright (C) 2009 - 2010 by OpenGamma Inc. * * Please see distribution for license. */ package com.opengamma.financial.web.position; import java.net.URI; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import org.joda.beans.impl.flexi.FlexiBean; import com.opengamma.financial.position.master.PortfolioTreeDocument; import com.opengamma.financial.position.master.PositionDocument; import com.opengamma.id.UniqueIdentifier; /** * RESTful resource for all positions in a node. */ @Path("/portfolios/{portfolioId}/nodes/{nodeId}/positions/{positionId}/versions/{versionId}") @Produces(MediaType.TEXT_HTML) public class WebPortfolioNodePositionVersionResource extends AbstractWebPortfolioResource { /** * Creates the resource. * @param parent the parent resource, not null */ public WebPortfolioNodePositionVersionResource(final AbstractWebPortfolioResource parent) { super(parent); } //------------------------------------------------------------------------- @GET public String get() { FlexiBean out = createRootData(); return getFreemarker().build("portfolios/portfolionodepositionversion.ftl", out); } //------------------------------------------------------------------------- /** * Creates the output root data. 
* @return the output root data, not null */ public FlexiBean createRootData() { PortfolioTreeDocument treeDoc = data().getPortfolio(); PositionDocument latestPositionDoc = data().getPosition(); PositionDocument versionedPosition = (PositionDocument) data().getVersioned(); FlexiBean out = getFreemarker().createRootData(); out.put("portfolioDoc", treeDoc); out.put("portfolio", treeDoc.getPortfolio()); out.put("node", data().getNode()); out.put("latestPositionDoc", latestPositionDoc); out.put("latestPosition", latestPositionDoc.getPosition()); out.put("positionDoc", versionedPosition); out.put("position", versionedPosition.getPosition()); out.put("uris", new WebPortfoliosUris(data())); return out; } //------------------------------------------------------------------------- /** * Builds a URI for this resource. * @param data the data, not null * @return the URI, not null */ public static URI uri(final WebPortfoliosData data) { return uri(data, null); } /** * Builds a URI for this resource. * @param data the data, not null * @param overrideVersionId the override version id, null uses information from data * @return the URI, not null */ public static URI uri(final WebPortfoliosData data, final UniqueIdentifier overrideVersionId) { String portfolioId = data.getBestPortfolioUriId(null); String nodeId = data.getBestNodeUriId(null); String positionId = data.getBestPositionUriId(null); String versionId = (overrideVersionId != null ? overrideVersionId.getVersion() : data.getUriVersionId()); return data.getUriInfo().getBaseUriBuilder().path(WebPortfolioNodePositionVersionResource.class).build(portfolioId, nodeId, positionId, versionId); } }
Javadoc
src/com/opengamma/financial/web/position/WebPortfolioNodePositionVersionResource.java
Javadoc
<ide><path>rc/com/opengamma/financial/web/position/WebPortfolioNodePositionVersionResource.java <ide> import com.opengamma.id.UniqueIdentifier; <ide> <ide> /** <del> * RESTful resource for all positions in a node. <add> * RESTful resource for a version of a position. <ide> */ <ide> @Path("/portfolios/{portfolioId}/nodes/{nodeId}/positions/{positionId}/versions/{versionId}") <ide> @Produces(MediaType.TEXT_HTML)
Java
apache-2.0
c06c3efa17e6bb039df9c74b0de9027f8fec8580
0
slipstream/SlipStreamServer,slipstream/SlipStreamServer,slipstream/SlipStreamServer,slipstream/SlipStreamServer
package com.sixsq.slipstream.run; /* * +=================================================================+ * SlipStream Server (WAR) * ===== * Copyright (C) 2013 SixSq Sarl (sixsq.com) * ===== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * -=================================================================- */ import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Map; import javax.persistence.EntityManager; import javax.persistence.EntityTransaction; import org.junit.Test; import org.restlet.Request; import org.restlet.Response; import org.restlet.data.Form; import org.restlet.data.Status; import org.restlet.representation.Representation; import org.restlet.representation.StringRepresentation; import com.sixsq.slipstream.exceptions.AbortException; import com.sixsq.slipstream.exceptions.ConfigurationException; import com.sixsq.slipstream.exceptions.NotFoundException; import com.sixsq.slipstream.exceptions.SlipStreamException; import com.sixsq.slipstream.exceptions.ValidationException; import com.sixsq.slipstream.persistence.PersistenceUtil; import com.sixsq.slipstream.persistence.Run; import com.sixsq.slipstream.persistence.RuntimeParameter; public class RuntimeParameterResourceTest extends RuntimeParameterResourceTestBase { @Test public void runtimeParameterResourceGetUnknownUuid() 
throws ConfigurationException { Request request = createGetRequest("unknownUuid", "aKey"); Response response = executeRequest(request); assertEquals(Status.CLIENT_ERROR_NOT_FOUND, response.getStatus()); } @Test public void runtimeParameterResourceGetUnknownKey() throws FileNotFoundException, IOException, SlipStreamException { Run run = createAndStoreRunImage("RuntimeParameterResourceGetUnknownKey"); Request request = createGetRequest(run.getUuid(), "unknownKey"); Response response = executeRequest(request); assertEquals(Status.CLIENT_ERROR_NOT_FOUND, response.getStatus()); run = Run.loadFromUuid(run.getUuid()); assertAbortSet(run); run.remove(); } @Test public void runtimeParameterResourceGetNotYetSetValue() throws FileNotFoundException, IOException, SlipStreamException { Run run = createAndStoreRunImage("runtimeParameterResourceGetNotYetSetValue"); Request request = createGetRequest(run.getUuid(), "ss:abort"); Response response = executeRequest(request); assertEquals(Status.CLIENT_ERROR_PRECONDITION_FAILED, response.getStatus()); assertAbortNotSet(run); run.remove(); } @Test public void runtimeParameterResourceGet() throws FileNotFoundException, IOException, SlipStreamException { Run run = createAndStoreRunImage("RuntimeParameterResourceGet"); String key = "node.1:key"; String value = "value of key"; storeRuntimeParameter(key, value, run); executeGetRequestAndAssertValue(run, key, value); run.remove(); } private void storeRuntimeParameter(String key, String value, Run run) throws ValidationException, NotFoundException { run.assignRuntimeParameter(key, value, ""); run.store(); } @Test public void runtimeParameterResourcePutExisting() throws FileNotFoundException, IOException, SlipStreamException { Run run = createAndStoreRunImage("runtimeParameterResourcePutExisting"); String key = "node.1:key"; String value = "value of key"; run.assignRuntimeParameter(key, value, ""); run.store(); Request request = createPutRequest(run.getUuid(), key, new 
StringRepresentation(value)); executeRequest(request); RuntimeParameter runtimeParameter = RuntimeParameter .loadFromUuidAndKey(run.getUuid(), key); run.remove(); assertNotNull(runtimeParameter); assertEquals("value of key", runtimeParameter.getValue()); assertAbortNotSet(run); } private void assertAbortNotSet(Run run) { assertThat(run.getRuntimeParameters().get("ss:abort").isSet(), is(false)); } private void assertAbortSet(Run run) { EntityManager em = PersistenceUtil.createEntityManager(); EntityTransaction transaction = em.getTransaction(); transaction.begin(); run = em.merge(run); assertThat(run.getRuntimeParameters().get("ss:abort").isSet(), is(true)); transaction.commit(); em.close(); } @Test public void runtimeParameterResourcePutNotExisting() throws FileNotFoundException, IOException, SlipStreamException { Run run = createAndStoreRunImage("runtimeParameterResourcePutNotExisting"); String key = "node.1:key"; Form form = new Form(); String value = "value of key"; form.add("value", value); Representation entity = form.getWebRepresentation(); Request request = createPutRequest(run.getUuid(), key, entity); Response response = executeRequest(request); run.remove(); assertThat(response.getStatus(), is(Status.CLIENT_ERROR_NOT_FOUND)); } @Test public void runtimeParameterRetrieveFromContainerRun() throws FileNotFoundException, IOException, SlipStreamException { Run run = createAndStoreRunImage("RuntimeParameterRetrieveFromContainerRun"); String key = "node.1:key"; String value = "value of key"; storeRuntimeParameter(key, value, run); EntityManager em = PersistenceUtil.createEntityManager(); EntityTransaction transaction = em.getTransaction(); transaction.begin(); run = Run.loadFromUuid(run.getUuid(), em); assertNotNull(run); assertEquals("value of key", run.getRuntimeParameterValue(key)); transaction.commit(); em.close(); run.remove(); } @Test public void runtimeParameterReset() throws SlipStreamException, FileNotFoundException, IOException { String key = 
"node.1:key"; String value = "value of key"; Run run = createAndStoreRunWithRuntimeParameter( "runtimeParameterReset", key, value); assertNotNull(run); assertEquals(value, run.getRuntimeParameterValue(key)); Request request = createDeleteRequest(run.getUuid(), key); Response response = executeRequest(request); assertEquals(Status.SUCCESS_OK, response.getStatus()); assertRuntimeParameterWasReset(run, key); run.remove(); } private void assertRuntimeParameterWasReset(Run run, String key) throws AbortException { RuntimeParameter rtp = RuntimeParameter.loadFromUuidAndKey( run.getUuid(), key); assertNotNull(rtp); assertThat(rtp.isSet(), is(false)); } @Test public void wrongNodeTriggersAbort() throws SlipStreamException, FileNotFoundException, IOException { String key = "wrong.1:key"; Run run = createAndStoreRunImage("wrongNodeTriggersAbort"); Request request = createGetRequest(run.getUuid(), key); Response response = executeRequest(request); assertEquals(Status.CLIENT_ERROR_NOT_FOUND, response.getStatus()); run = Run.loadFromUuid(run.getUuid()); assertAbortSet(run); run.remove(); } @Test public void wrongKeyTriggersAbort() throws SlipStreamException, FileNotFoundException, IOException { String key = "ss:wrong"; Run run = createAndStoreRunImage("wrongKeyTriggersAbort"); Request request = createGetRequest(run.getUuid(), key); Response response = executeRequest(request); assertEquals(Status.CLIENT_ERROR_NOT_FOUND, response.getStatus()); run = Run.loadFromUuid(run.getUuid()); assertAbortSet(run); run.remove(); } @Test public void cantSetAbortTwice() throws SlipStreamException, FileNotFoundException, IOException { String key = "ss:abort"; Run run = createAndStoreRunImage("cantSetAbortTwice"); Request request = createPutRequest(run.getUuid(), key, new StringRepresentation("first abort")); Response response = executeRequest(request); assertEquals(Status.SUCCESS_OK, response.getStatus()); RuntimeParameter abort = RuntimeParameter.loadFromUuidAndKey( run.getUuid(), "ss:abort"); 
assertThat(abort.getValue(), is("first abort")); request = createPutRequest(run.getUuid(), key, new StringRepresentation("second abort")); response = executeRequest(request); assertEquals(Status.CLIENT_ERROR_CONFLICT, response.getStatus()); abort = RuntimeParameter.loadFromUuidAndKey(run.getUuid(), "ss:abort"); assertThat(abort.getValue(), is("first abort")); run.remove(); } @Test public void errorSetsNodeAndGlobalAbort() throws FileNotFoundException, IOException, SlipStreamException { String machineAbortKey = Run.MACHINE_NAME_PREFIX.toLowerCase() + RuntimeParameter.ABORT_KEY; String globalAbortKey = RuntimeParameter.GLOBAL_ABORT_KEY; String abortMessage = "machine abort"; Run run = createAndStoreRunImage("errorSetsNodeAndGlobalAbort"); Request request = createPutRequest(run.getUuid(), machineAbortKey, new StringRepresentation(abortMessage)); Response response = executeRequest(request); assertEquals(Status.SUCCESS_OK, response.getStatus()); RuntimeParameter nodeAbort = RuntimeParameter.loadFromUuidAndKey( run.getUuid(), machineAbortKey); assertThat(nodeAbort.getValue(), is(abortMessage)); RuntimeParameter globalAbort = RuntimeParameter.loadFromUuidAndKey( run.getUuid(), globalAbortKey); assertThat(globalAbort.getValue(), is(abortMessage)); } @Test public void cancelAbort() throws FileNotFoundException, IOException, SlipStreamException { String machineAbortKey = Run.MACHINE_NAME_PREFIX.toLowerCase() + RuntimeParameter.ABORT_KEY; String globalAbortKey = RuntimeParameter.GLOBAL_ABORT_KEY; String abortMessage = "machine abort"; Run run = createAndStoreRunImage("errorSetsNodeAndGlobalAbort"); Request request = createPutRequest(run.getUuid(), machineAbortKey, new StringRepresentation(abortMessage)); Response response = executeRequest(request); assertEquals(Status.SUCCESS_OK, response.getStatus()); RuntimeParameter nodeAbort = RuntimeParameter.loadFromUuidAndKey( run.getUuid(), machineAbortKey); assertThat(nodeAbort.getValue(), is(abortMessage)); RuntimeParameter 
globalAbort = RuntimeParameter.loadFromUuidAndKey( run.getUuid(), RuntimeParameter.GLOBAL_ABORT_KEY); assertThat(globalAbort.getValue(), is(abortMessage)); Map<String, Object> attributes = createRequestAttributes(run.getUuid(), machineAbortKey); attributes.put(RunListResource.IGNORE_ABORT_QUERY, "true"); request = createPutRequest(attributes, new StringRepresentation("")); response = executeRequest(request); assertEquals(Status.SUCCESS_OK, response.getStatus()); nodeAbort = RuntimeParameter.loadFromUuidAndKey(run.getUuid(), machineAbortKey); assertThat(nodeAbort.getValue(), is("")); globalAbort = RuntimeParameter.loadFromUuidAndKey(run.getUuid(), globalAbortKey); assertThat(globalAbort.getValue(), is("")); } }
jar/src/test/java/com/sixsq/slipstream/run/RuntimeParameterResourceTest.java
package com.sixsq.slipstream.run; /* * +=================================================================+ * SlipStream Server (WAR) * ===== * Copyright (C) 2013 SixSq Sarl (sixsq.com) * ===== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * -=================================================================- */ import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Map; import javax.persistence.EntityManager; import javax.persistence.EntityTransaction; import org.junit.Ignore; import org.junit.Test; import org.restlet.Request; import org.restlet.Response; import org.restlet.data.Form; import org.restlet.data.Status; import org.restlet.representation.Representation; import org.restlet.representation.StringRepresentation; import com.sixsq.slipstream.exceptions.AbortException; import com.sixsq.slipstream.exceptions.ConfigurationException; import com.sixsq.slipstream.exceptions.NotFoundException; import com.sixsq.slipstream.exceptions.SlipStreamException; import com.sixsq.slipstream.exceptions.ValidationException; import com.sixsq.slipstream.persistence.PersistenceUtil; import com.sixsq.slipstream.persistence.Run; import com.sixsq.slipstream.persistence.RuntimeParameter; @Ignore("Problem running in Jenkins") public class RuntimeParameterResourceTest extends 
RuntimeParameterResourceTestBase { @Test public void runtimeParameterResourceGetUnknownUuid() throws ConfigurationException { Request request = createGetRequest("unknownUuid", "aKey"); Response response = executeRequest(request); assertEquals(Status.CLIENT_ERROR_NOT_FOUND, response.getStatus()); } @Test public void runtimeParameterResourceGetUnknownKey() throws FileNotFoundException, IOException, SlipStreamException { Run run = createAndStoreRunImage("RuntimeParameterResourceGetUnknownKey"); Request request = createGetRequest(run.getUuid(), "unknownKey"); Response response = executeRequest(request); assertEquals(Status.CLIENT_ERROR_NOT_FOUND, response.getStatus()); run = Run.loadFromUuid(run.getUuid()); assertAbortSet(run); run.remove(); } @Test public void runtimeParameterResourceGetNotYetSetValue() throws FileNotFoundException, IOException, SlipStreamException { Run run = createAndStoreRunImage("runtimeParameterResourceGetNotYetSetValue"); Request request = createGetRequest(run.getUuid(), "ss:abort"); Response response = executeRequest(request); assertEquals(Status.CLIENT_ERROR_PRECONDITION_FAILED, response.getStatus()); assertAbortNotSet(run); run.remove(); } @Test public void runtimeParameterResourceGet() throws FileNotFoundException, IOException, SlipStreamException { Run run = createAndStoreRunImage("RuntimeParameterResourceGet"); String key = "node.1:key"; String value = "value of key"; storeRuntimeParameter(key, value, run); executeGetRequestAndAssertValue(run, key, value); run.remove(); } private void storeRuntimeParameter(String key, String value, Run run) throws ValidationException, NotFoundException { run.assignRuntimeParameter(key, value, ""); run.store(); } @Test public void runtimeParameterResourcePutExisting() throws FileNotFoundException, IOException, SlipStreamException { Run run = createAndStoreRunImage("runtimeParameterResourcePutExisting"); String key = "node.1:key"; String value = "value of key"; run.assignRuntimeParameter(key, value, ""); 
run.store(); Request request = createPutRequest(run.getUuid(), key, new StringRepresentation(value)); executeRequest(request); RuntimeParameter runtimeParameter = RuntimeParameter .loadFromUuidAndKey(run.getUuid(), key); run.remove(); assertNotNull(runtimeParameter); assertEquals("value of key", runtimeParameter.getValue()); assertAbortNotSet(run); } private void assertAbortNotSet(Run run) { assertThat(run.getRuntimeParameters().get("ss:abort").isSet(), is(false)); } private void assertAbortSet(Run run) { EntityManager em = PersistenceUtil.createEntityManager(); EntityTransaction transaction = em.getTransaction(); transaction.begin(); run = em.merge(run); assertThat(run.getRuntimeParameters().get("ss:abort").isSet(), is(true)); transaction.commit(); em.close(); } @Test public void runtimeParameterResourcePutNotExisting() throws FileNotFoundException, IOException, SlipStreamException { Run run = createAndStoreRunImage("runtimeParameterResourcePutNotExisting"); String key = "node.1:key"; Form form = new Form(); String value = "value of key"; form.add("value", value); Representation entity = form.getWebRepresentation(); Request request = createPutRequest(run.getUuid(), key, entity); Response response = executeRequest(request); run.remove(); assertThat(response.getStatus(), is(Status.CLIENT_ERROR_NOT_FOUND)); } @Test public void runtimeParameterRetrieveFromContainerRun() throws FileNotFoundException, IOException, SlipStreamException { Run run = createAndStoreRunImage("RuntimeParameterRetrieveFromContainerRun"); String key = "node.1:key"; String value = "value of key"; storeRuntimeParameter(key, value, run); EntityManager em = PersistenceUtil.createEntityManager(); EntityTransaction transaction = em.getTransaction(); transaction.begin(); run = Run.loadFromUuid(run.getUuid(), em); assertNotNull(run); assertEquals("value of key", run.getRuntimeParameterValue(key)); transaction.commit(); em.close(); run.remove(); } @Test public void runtimeParameterReset() throws 
SlipStreamException, FileNotFoundException, IOException { String key = "node.1:key"; String value = "value of key"; Run run = createAndStoreRunWithRuntimeParameter( "runtimeParameterReset", key, value); assertNotNull(run); assertEquals(value, run.getRuntimeParameterValue(key)); Request request = createDeleteRequest(run.getUuid(), key); Response response = executeRequest(request); assertEquals(Status.SUCCESS_OK, response.getStatus()); assertRuntimeParameterWasReset(run, key); run.remove(); } private void assertRuntimeParameterWasReset(Run run, String key) throws AbortException { RuntimeParameter rtp = RuntimeParameter.loadFromUuidAndKey( run.getUuid(), key); assertNotNull(rtp); assertThat(rtp.isSet(), is(false)); } @Test public void wrongNodeTriggersAbort() throws SlipStreamException, FileNotFoundException, IOException { String key = "wrong.1:key"; Run run = createAndStoreRunImage("wrongNodeTriggersAbort"); Request request = createGetRequest(run.getUuid(), key); Response response = executeRequest(request); assertEquals(Status.CLIENT_ERROR_NOT_FOUND, response.getStatus()); run = Run.loadFromUuid(run.getUuid()); assertAbortSet(run); run.remove(); } @Test public void wrongKeyTriggersAbort() throws SlipStreamException, FileNotFoundException, IOException { String key = "ss:wrong"; Run run = createAndStoreRunImage("wrongKeyTriggersAbort"); Request request = createGetRequest(run.getUuid(), key); Response response = executeRequest(request); assertEquals(Status.CLIENT_ERROR_NOT_FOUND, response.getStatus()); run = Run.loadFromUuid(run.getUuid()); assertAbortSet(run); run.remove(); } @Test public void cantSetAbortTwice() throws SlipStreamException, FileNotFoundException, IOException { String key = "ss:abort"; Run run = createAndStoreRunImage("cantSetAbortTwice"); Request request = createPutRequest(run.getUuid(), key, new StringRepresentation("first abort")); Response response = executeRequest(request); assertEquals(Status.SUCCESS_OK, response.getStatus()); RuntimeParameter 
abort = RuntimeParameter.loadFromUuidAndKey( run.getUuid(), "ss:abort"); assertThat(abort.getValue(), is("first abort")); request = createPutRequest(run.getUuid(), key, new StringRepresentation("second abort")); response = executeRequest(request); assertEquals(Status.CLIENT_ERROR_CONFLICT, response.getStatus()); abort = RuntimeParameter.loadFromUuidAndKey(run.getUuid(), "ss:abort"); assertThat(abort.getValue(), is("first abort")); run.remove(); } @Test public void errorSetsNodeAndGlobalAbort() throws FileNotFoundException, IOException, SlipStreamException { String machineAbortKey = Run.MACHINE_NAME_PREFIX.toLowerCase() + RuntimeParameter.ABORT_KEY; String globalAbortKey = RuntimeParameter.GLOBAL_ABORT_KEY; String abortMessage = "machine abort"; Run run = createAndStoreRunImage("errorSetsNodeAndGlobalAbort"); Request request = createPutRequest(run.getUuid(), machineAbortKey, new StringRepresentation(abortMessage)); Response response = executeRequest(request); assertEquals(Status.SUCCESS_OK, response.getStatus()); RuntimeParameter nodeAbort = RuntimeParameter.loadFromUuidAndKey( run.getUuid(), machineAbortKey); assertThat(nodeAbort.getValue(), is(abortMessage)); RuntimeParameter globalAbort = RuntimeParameter.loadFromUuidAndKey( run.getUuid(), globalAbortKey); assertThat(globalAbort.getValue(), is(abortMessage)); } @Test public void cancelAbort() throws FileNotFoundException, IOException, SlipStreamException { String machineAbortKey = Run.MACHINE_NAME_PREFIX.toLowerCase() + RuntimeParameter.ABORT_KEY; String globalAbortKey = RuntimeParameter.GLOBAL_ABORT_KEY; String abortMessage = "machine abort"; Run run = createAndStoreRunImage("errorSetsNodeAndGlobalAbort"); Request request = createPutRequest(run.getUuid(), machineAbortKey, new StringRepresentation(abortMessage)); Response response = executeRequest(request); assertEquals(Status.SUCCESS_OK, response.getStatus()); RuntimeParameter nodeAbort = RuntimeParameter.loadFromUuidAndKey( run.getUuid(), machineAbortKey); 
assertThat(nodeAbort.getValue(), is(abortMessage)); RuntimeParameter globalAbort = RuntimeParameter.loadFromUuidAndKey( run.getUuid(), RuntimeParameter.GLOBAL_ABORT_KEY); assertThat(globalAbort.getValue(), is(abortMessage)); Map<String, Object> attributes = createRequestAttributes(run.getUuid(), machineAbortKey); attributes.put(RunListResource.IGNORE_ABORT_QUERY, "true"); request = createPutRequest(attributes, new StringRepresentation("")); response = executeRequest(request); assertEquals(Status.SUCCESS_OK, response.getStatus()); nodeAbort = RuntimeParameter.loadFromUuidAndKey(run.getUuid(), machineAbortKey); assertThat(nodeAbort.getValue(), is("")); globalAbort = RuntimeParameter.loadFromUuidAndKey(run.getUuid(), globalAbortKey); assertThat(globalAbort.getValue(), is("")); } }
enabled the test back. Instead will skip it when building the maven project in Jenkins.
jar/src/test/java/com/sixsq/slipstream/run/RuntimeParameterResourceTest.java
enabled the test back. Instead will skip it when building the maven project in Jenkins.
<ide><path>ar/src/test/java/com/sixsq/slipstream/run/RuntimeParameterResourceTest.java <ide> import javax.persistence.EntityManager; <ide> import javax.persistence.EntityTransaction; <ide> <del>import org.junit.Ignore; <ide> import org.junit.Test; <ide> import org.restlet.Request; <ide> import org.restlet.Response; <ide> import com.sixsq.slipstream.persistence.Run; <ide> import com.sixsq.slipstream.persistence.RuntimeParameter; <ide> <del>@Ignore("Problem running in Jenkins") <ide> public class RuntimeParameterResourceTest extends <ide> RuntimeParameterResourceTestBase { <ide>
JavaScript
mit
a5cdbea8399eac7259257afbb31984910971dec3
0
bfred-it/iphone-inline-video,bfred-it/iphone-inline-video
var makeVideoPlayableInline=function(){"use strict";function e(e){var r=void 0;var n=void 0;function i(t){r=requestAnimationFrame(i);e(t-(n||t));n=t}this.start=function(){if(!r){i(Date.now())}};this.stop=function(){cancelAnimationFrame(r);r=null}}function r(e,r,n,i){function t(r){if(Boolean(e[n])===Boolean(i)){r.stopImmediatePropagation()}delete e[n]}e.addEventListener(r,t,false);return t}function n(e,r,n,i){function t(){return n[r]}function u(e){n[r]=e}if(i){u(e[r])}Object.defineProperty(e,r,{get:t,set:u})}var i=typeof Symbol==="undefined"?function(e){return"@"+(e||"@")+Math.random().toString(26)}:Symbol;var t=/iPhone|iPod/i.test(navigator.userAgent);var u=i();var a=i();var d=i("nativeplay");var o=i("nativepause");function v(e){var r=new Audio;r.src=e.currentSrc||e.src;return r}var f=[];f.i=0;function c(e,r){if((f.tue||0)+200<Date.now()){e[a]=true;f.tue=Date.now()}e.currentTime=r;f[++f.i%3]=r*100|0/100}function s(e){return e.driver.currentTime>=e.video.duration}function l(e){var r=this;if(!r.hasAudio){r.driver.currentTime=r.video.currentTime+e*r.video.playbackRate/1e3;if(r.video.loop&&s(r)){r.driver.currentTime=0}}c(r.video,r.driver.currentTime);if(r.video.ended){r.video.pause(true);return false}}function p(){var e=this;var r=e[u];if(e.webkitDisplayingFullscreen){e[d]();return}if(!e.paused){return}if(!e.buffered.length){e.load()}r.driver.play();r.updater.start();e.dispatchEvent(new Event("play"));e.dispatchEvent(new Event("playing"))}function m(e){var r=this;var n=r[u];n.driver.pause();n.updater.stop();if(r.webkitDisplayingFullscreen){r[o]()}if(r.paused&&!e){return}r.dispatchEvent(new Event("pause"));if(r.ended){r[a]=true;r.dispatchEvent(new Event("ended"))}}function y(r,n){var i=r[u]={};i.hasAudio=n;i.video=r;i.updater=new e(l.bind(i));if(n){i.driver=v(r)}else{i.driver={muted:true,paused:true,pause:function t(){i.driver.paused=true},play:function a(){i.driver.paused=false;if(s(i)){c(r,0)}},get ended(){return 
s(i)}}}r.addEventListener("emptied",function(){if(i.driver.src&&i.driver.src!==r.currentSrc){c(r,0);r.pause();i.driver.src=r.currentSrc}},false);r.addEventListener("webkitbeginfullscreen",function(){if(!r.paused){r.pause();r[d]()}else if(n&&!i.driver.buffered.length){i.driver.load()}});if(n){r.addEventListener("webkitendfullscreen",function(){i.driver.currentTime=i.video.currentTime});r.addEventListener("seeking",function(){if(f.indexOf(i.video.currentTime*100|0/100)<0){i.driver.currentTime=i.video.currentTime}})}}function h(e){var i=e[u];e[d]=e.play;e[o]=e.pause;e.play=p;e.pause=m;n(e,"paused",i.driver);n(e,"muted",i.driver,true);n(e,"playbackRate",i.driver,true);n(e,"ended",i.driver);n(e,"loop",i.driver,true);r(e,"seeking");r(e,"seeked");r(e,"timeupdate",a,false);r(e,"ended",a,false)}function b(e){var r=arguments.length<=1||arguments[1]===undefined?true:arguments[1];var n=arguments.length<=2||arguments[2]===undefined?true:arguments[2];if(n&&!t||e[u]){return}y(e,r);h(e);if(!r&&e.autoplay){e.play()}}return b}();
dist/iphone-inline-video.browser.js
var makeVideoPlayableInline=function(){"use strict";function e(e){var r=void 0;var n=void 0;function i(t){r=requestAnimationFrame(i);e(t-(n||t));n=t}this.start=function(){if(!r){i(Date.now())}};this.stop=function(){cancelAnimationFrame(r);r=null}}function r(e,r,n,i){function t(r){if(Boolean(e[n])===Boolean(i)){r.stopImmediatePropagation()}delete e[n]}e.addEventListener(r,t,false);return t}function n(e,r,n,i){function t(){return n[r]}function u(e){n[r]=e}if(i){u(e[r])}Object.defineProperty(e,r,{get:t,set:u})}var i=typeof Symbol==="undefined"?function(e){return"@"+(e||"@")+Math.random().toString(26)}:Symbol;var t=/iPhone|iPod/i.test(navigator.userAgent);var u=i();var a=i();var d=i("nativeplay");var o=i("nativepause");function v(e){var r=new Audio;r.src=e.currentSrc||e.src;return r}var f=[];f.i=0;function c(e,r){if((f.tue||0)+200<Date.now()){e[a]=true;f.tue=Date.now()}e.currentTime=r;f[++f.i%3]=r*100|0/100}function s(e){return e.driver.currentTime>=e.video.duration}function l(e){var r=this;if(!r.hasAudio){r.driver.currentTime=r.video.currentTime+e*r.video.playbackRate/1e3;if(r.video.loop&&s(r)){r.driver.currentTime=0}}c(r.video,r.driver.currentTime);if(r.video.ended){r.video.pause(true);return false}}function p(){var e=this;var r=e[u];if(e.webkitDisplayingFullscreen){e[d]();return}if(!e.paused){return}if(!e.buffered.length){e.load()}r.driver.play();r.updater.start();e.dispatchEvent(new Event("play"));e.dispatchEvent(new Event("playing"))}function m(e){var r=this;var n=r[u];n.driver.pause();n.updater.stop();if(r.webkitDisplayingFullscreen){r[o]()}if(r.paused&&!e){return}r.dispatchEvent(new Event("pause"));if(r.ended){r[a]=true;r.dispatchEvent(new Event("ended"))}}function y(r,n){var i=r[u]={};i.hasAudio=n;i.video=r;i.updater=new e(l.bind(i));if(n){i.driver=v(r)}else{i.driver={muted:true,paused:true,pause:function t(){i.driver.paused=true},play:function a(){i.driver.paused=false;if(s(i)){c(r,0)}},get ended(){return 
s(i)}}}r.addEventListener("emptied",function(){if(i.driver.src&&i.driver.src!==r.currentSrc){c(r,0);r.pause();i.driver.src=r.currentSrc}},false);r.addEventListener("webkitbeginfullscreen",function(){if(!r.paused){r.pause();r[d]()}else if(n&&!i.driver.buffered.length){i.driver.load()}});if(n){r.addEventListener("webkitendfullscreen",function(){i.driver.currentTime=i.video.currentTime});r.addEventListener("seeking",function(){if(f.indexOf(i.video.currentTime*100|0/100)<0){i.driver.currentTime=i.video.currentTime}})}}function h(e){var i=e[u];e[d]=e.play;e[o]=e.pause;e.play=p;e.pause=m;n(e,"paused",i.driver);n(e,"muted",i.driver,true);n(e,"playbackRate",i.driver,true);n(e,"ended",i.driver);n(e,"loop",i.driver,true);r(e,"seeking");r(e,"seeked");r(e,"timeupdate",a,false);r(e,"ended",a,false)}function b(e){var r=arguments.length<=1||arguments[1]===undefined?true:arguments[1];var n=arguments.length<=2||arguments[2]===undefined?true:arguments[2];if(n&&!t){return}y(e,r);h(e);if(!r&&e.autoplay){e.play()}}return b}();
Updated built browser file
dist/iphone-inline-video.browser.js
Updated built browser file
<ide><path>ist/iphone-inline-video.browser.js <del>var makeVideoPlayableInline=function(){"use strict";function e(e){var r=void 0;var n=void 0;function i(t){r=requestAnimationFrame(i);e(t-(n||t));n=t}this.start=function(){if(!r){i(Date.now())}};this.stop=function(){cancelAnimationFrame(r);r=null}}function r(e,r,n,i){function t(r){if(Boolean(e[n])===Boolean(i)){r.stopImmediatePropagation()}delete e[n]}e.addEventListener(r,t,false);return t}function n(e,r,n,i){function t(){return n[r]}function u(e){n[r]=e}if(i){u(e[r])}Object.defineProperty(e,r,{get:t,set:u})}var i=typeof Symbol==="undefined"?function(e){return"@"+(e||"@")+Math.random().toString(26)}:Symbol;var t=/iPhone|iPod/i.test(navigator.userAgent);var u=i();var a=i();var d=i("nativeplay");var o=i("nativepause");function v(e){var r=new Audio;r.src=e.currentSrc||e.src;return r}var f=[];f.i=0;function c(e,r){if((f.tue||0)+200<Date.now()){e[a]=true;f.tue=Date.now()}e.currentTime=r;f[++f.i%3]=r*100|0/100}function s(e){return e.driver.currentTime>=e.video.duration}function l(e){var r=this;if(!r.hasAudio){r.driver.currentTime=r.video.currentTime+e*r.video.playbackRate/1e3;if(r.video.loop&&s(r)){r.driver.currentTime=0}}c(r.video,r.driver.currentTime);if(r.video.ended){r.video.pause(true);return false}}function p(){var e=this;var r=e[u];if(e.webkitDisplayingFullscreen){e[d]();return}if(!e.paused){return}if(!e.buffered.length){e.load()}r.driver.play();r.updater.start();e.dispatchEvent(new Event("play"));e.dispatchEvent(new Event("playing"))}function m(e){var r=this;var n=r[u];n.driver.pause();n.updater.stop();if(r.webkitDisplayingFullscreen){r[o]()}if(r.paused&&!e){return}r.dispatchEvent(new Event("pause"));if(r.ended){r[a]=true;r.dispatchEvent(new Event("ended"))}}function y(r,n){var i=r[u]={};i.hasAudio=n;i.video=r;i.updater=new e(l.bind(i));if(n){i.driver=v(r)}else{i.driver={muted:true,paused:true,pause:function t(){i.driver.paused=true},play:function a(){i.driver.paused=false;if(s(i)){c(r,0)}},get ended(){return 
s(i)}}}r.addEventListener("emptied",function(){if(i.driver.src&&i.driver.src!==r.currentSrc){c(r,0);r.pause();i.driver.src=r.currentSrc}},false);r.addEventListener("webkitbeginfullscreen",function(){if(!r.paused){r.pause();r[d]()}else if(n&&!i.driver.buffered.length){i.driver.load()}});if(n){r.addEventListener("webkitendfullscreen",function(){i.driver.currentTime=i.video.currentTime});r.addEventListener("seeking",function(){if(f.indexOf(i.video.currentTime*100|0/100)<0){i.driver.currentTime=i.video.currentTime}})}}function h(e){var i=e[u];e[d]=e.play;e[o]=e.pause;e.play=p;e.pause=m;n(e,"paused",i.driver);n(e,"muted",i.driver,true);n(e,"playbackRate",i.driver,true);n(e,"ended",i.driver);n(e,"loop",i.driver,true);r(e,"seeking");r(e,"seeked");r(e,"timeupdate",a,false);r(e,"ended",a,false)}function b(e){var r=arguments.length<=1||arguments[1]===undefined?true:arguments[1];var n=arguments.length<=2||arguments[2]===undefined?true:arguments[2];if(n&&!t){return}y(e,r);h(e);if(!r&&e.autoplay){e.play()}}return b}(); <add>var makeVideoPlayableInline=function(){"use strict";function e(e){var r=void 0;var n=void 0;function i(t){r=requestAnimationFrame(i);e(t-(n||t));n=t}this.start=function(){if(!r){i(Date.now())}};this.stop=function(){cancelAnimationFrame(r);r=null}}function r(e,r,n,i){function t(r){if(Boolean(e[n])===Boolean(i)){r.stopImmediatePropagation()}delete e[n]}e.addEventListener(r,t,false);return t}function n(e,r,n,i){function t(){return n[r]}function u(e){n[r]=e}if(i){u(e[r])}Object.defineProperty(e,r,{get:t,set:u})}var i=typeof Symbol==="undefined"?function(e){return"@"+(e||"@")+Math.random().toString(26)}:Symbol;var t=/iPhone|iPod/i.test(navigator.userAgent);var u=i();var a=i();var d=i("nativeplay");var o=i("nativepause");function v(e){var r=new Audio;r.src=e.currentSrc||e.src;return r}var f=[];f.i=0;function c(e,r){if((f.tue||0)+200<Date.now()){e[a]=true;f.tue=Date.now()}e.currentTime=r;f[++f.i%3]=r*100|0/100}function s(e){return 
e.driver.currentTime>=e.video.duration}function l(e){var r=this;if(!r.hasAudio){r.driver.currentTime=r.video.currentTime+e*r.video.playbackRate/1e3;if(r.video.loop&&s(r)){r.driver.currentTime=0}}c(r.video,r.driver.currentTime);if(r.video.ended){r.video.pause(true);return false}}function p(){var e=this;var r=e[u];if(e.webkitDisplayingFullscreen){e[d]();return}if(!e.paused){return}if(!e.buffered.length){e.load()}r.driver.play();r.updater.start();e.dispatchEvent(new Event("play"));e.dispatchEvent(new Event("playing"))}function m(e){var r=this;var n=r[u];n.driver.pause();n.updater.stop();if(r.webkitDisplayingFullscreen){r[o]()}if(r.paused&&!e){return}r.dispatchEvent(new Event("pause"));if(r.ended){r[a]=true;r.dispatchEvent(new Event("ended"))}}function y(r,n){var i=r[u]={};i.hasAudio=n;i.video=r;i.updater=new e(l.bind(i));if(n){i.driver=v(r)}else{i.driver={muted:true,paused:true,pause:function t(){i.driver.paused=true},play:function a(){i.driver.paused=false;if(s(i)){c(r,0)}},get ended(){return s(i)}}}r.addEventListener("emptied",function(){if(i.driver.src&&i.driver.src!==r.currentSrc){c(r,0);r.pause();i.driver.src=r.currentSrc}},false);r.addEventListener("webkitbeginfullscreen",function(){if(!r.paused){r.pause();r[d]()}else if(n&&!i.driver.buffered.length){i.driver.load()}});if(n){r.addEventListener("webkitendfullscreen",function(){i.driver.currentTime=i.video.currentTime});r.addEventListener("seeking",function(){if(f.indexOf(i.video.currentTime*100|0/100)<0){i.driver.currentTime=i.video.currentTime}})}}function h(e){var i=e[u];e[d]=e.play;e[o]=e.pause;e.play=p;e.pause=m;n(e,"paused",i.driver);n(e,"muted",i.driver,true);n(e,"playbackRate",i.driver,true);n(e,"ended",i.driver);n(e,"loop",i.driver,true);r(e,"seeking");r(e,"seeked");r(e,"timeupdate",a,false);r(e,"ended",a,false)}function b(e){var r=arguments.length<=1||arguments[1]===undefined?true:arguments[1];var 
n=arguments.length<=2||arguments[2]===undefined?true:arguments[2];if(n&&!t||e[u]){return}y(e,r);h(e);if(!r&&e.autoplay){e.play()}}return b}();
Java
apache-2.0
0b34d7cd3eb8963874cdfaa189797ab14a4417f6
0
GerritCodeReview/gerrit,qtproject/qtqa-gerrit,gerrit-review/gerrit,WANdisco/gerrit,gerrit-review/gerrit,WANdisco/gerrit,qtproject/qtqa-gerrit,gerrit-review/gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,WANdisco/gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit,qtproject/qtqa-gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,gerrit-review/gerrit,GerritCodeReview/gerrit,gerrit-review/gerrit,gerrit-review/gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,WANdisco/gerrit,GerritCodeReview/gerrit,gerrit-review/gerrit,GerritCodeReview/gerrit
// Copyright (C) 2012 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.git; import com.google.gerrit.extensions.client.ChangeKind; import com.google.gerrit.reviewdb.client.Branch; import com.google.gerrit.reviewdb.client.Change.Status; import com.google.gerrit.reviewdb.client.Project; import com.google.gerrit.server.change.ChangeKindCache; import com.google.gerrit.server.git.CodeReviewCommit.CodeReviewRevWalk; import com.google.gerrit.server.git.strategy.CommitMergeStatus; import com.google.gerrit.server.query.change.ChangeData; import com.google.gerrit.server.query.change.InternalChangeQuery; import com.google.gwtorm.server.OrmException; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevFlag; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RebaseSorter { private static final Logger log = LoggerFactory.getLogger(RebaseSorter.class); private final CodeReviewRevWalk rw; private final RevFlag canMergeFlag; private final RevCommit initialTip; private final Set<RevCommit> alreadyAccepted; private final InternalChangeQuery internalChangeQuery; private final ChangeKindCache 
changeKindCache; private final Repository repo; public RebaseSorter( CodeReviewRevWalk rw, RevCommit initialTip, Set<RevCommit> alreadyAccepted, RevFlag canMergeFlag, InternalChangeQuery internalChangeQuery, ChangeKindCache changeKindCache, Repository repo) { this.rw = rw; this.canMergeFlag = canMergeFlag; this.initialTip = initialTip; this.alreadyAccepted = alreadyAccepted; this.internalChangeQuery = internalChangeQuery; this.changeKindCache = changeKindCache; this.repo = repo; } public List<CodeReviewCommit> sort(Collection<CodeReviewCommit> incoming) throws IOException { final List<CodeReviewCommit> sorted = new ArrayList<>(); final Set<CodeReviewCommit> sort = new HashSet<>(incoming); while (!sort.isEmpty()) { final CodeReviewCommit n = removeOne(sort); rw.resetRetain(canMergeFlag); rw.markStart(n); if (initialTip != null) { rw.markUninteresting(initialTip); } CodeReviewCommit c; final List<CodeReviewCommit> contents = new ArrayList<>(); while ((c = rw.next()) != null) { if (!c.has(canMergeFlag) || !incoming.contains(c)) { if (isAlreadyMerged(c, n.change().getDest())) { rw.markUninteresting(c); } else { // We cannot merge n as it would bring something we // aren't permitted to merge at this time. Drop n. // n.setStatusCode(CommitMergeStatus.MISSING_DEPENDENCY); } // Stop RevWalk because c is either a merged commit or a missing // dependency. Not need to walk further. 
break; } contents.add(c); } if (n.getStatusCode() == CommitMergeStatus.MISSING_DEPENDENCY) { continue; } sort.removeAll(contents); Collections.reverse(contents); sorted.removeAll(contents); sorted.addAll(contents); } return sorted; } private boolean isAlreadyMerged(CodeReviewCommit commit, Branch.NameKey dest) throws IOException { try (CodeReviewRevWalk mirw = CodeReviewCommit.newRevWalk(rw.getObjectReader())) { mirw.reset(); mirw.markStart(commit); // check if the commit is merged in other branches for (RevCommit accepted : alreadyAccepted) { if (mirw.isMergedInto(mirw.parseCommit(accepted), mirw.parseCommit(commit))) { log.debug("Dependency {} merged into branch head {}.", commit.getName(), accepted.getName()); return true; } } // check if the commit associated change is merged in the same branch List<ChangeData> changes = internalChangeQuery.byCommit(commit); for (ChangeData change : changes) { if (change.change().getStatus() == Status.MERGED && change.change().getDest().equals(dest) && !isRework(dest.getParentKey(), commit, change)) { log.debug("Dependency {} associated with merged change {}.", commit.getName(), change.getId()); return true; } } return false; } catch (OrmException e) { throw new IOException(e); } } private boolean isRework(Project.NameKey project, RevCommit oldCommit, ChangeData change) throws OrmException, IOException { RevCommit currentCommit = rw.parseCommit(ObjectId.fromString(change.currentPatchSet().getRevision().get())); return ChangeKind.REWORK == changeKindCache.getChangeKind(project, repo, oldCommit, currentCommit); } private static <T> T removeOne(final Collection<T> c) { final Iterator<T> i = c.iterator(); final T r = i.next(); i.remove(); return r; } }
gerrit-server/src/main/java/com/google/gerrit/server/git/RebaseSorter.java
// Copyright (C) 2012 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.git; import com.google.gerrit.extensions.client.ChangeKind; import com.google.gerrit.reviewdb.client.Branch; import com.google.gerrit.reviewdb.client.Change.Status; import com.google.gerrit.reviewdb.client.Project; import com.google.gerrit.server.change.ChangeKindCache; import com.google.gerrit.server.git.CodeReviewCommit.CodeReviewRevWalk; import com.google.gerrit.server.git.strategy.CommitMergeStatus; import com.google.gerrit.server.query.change.ChangeData; import com.google.gerrit.server.query.change.InternalChangeQuery; import com.google.gwtorm.server.OrmException; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.Repository; import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevFlag; public class RebaseSorter { private final CodeReviewRevWalk rw; private final RevFlag canMergeFlag; private final RevCommit initialTip; private final Set<RevCommit> alreadyAccepted; private final InternalChangeQuery internalChangeQuery; private final ChangeKindCache changeKindCache; private final Repository repo; public RebaseSorter( CodeReviewRevWalk rw, RevCommit initialTip, Set<RevCommit> 
alreadyAccepted, RevFlag canMergeFlag, InternalChangeQuery internalChangeQuery, ChangeKindCache changeKindCache, Repository repo) { this.rw = rw; this.canMergeFlag = canMergeFlag; this.initialTip = initialTip; this.alreadyAccepted = alreadyAccepted; this.internalChangeQuery = internalChangeQuery; this.changeKindCache = changeKindCache; this.repo = repo; } public List<CodeReviewCommit> sort(Collection<CodeReviewCommit> incoming) throws IOException { final List<CodeReviewCommit> sorted = new ArrayList<>(); final Set<CodeReviewCommit> sort = new HashSet<>(incoming); while (!sort.isEmpty()) { final CodeReviewCommit n = removeOne(sort); rw.resetRetain(canMergeFlag); rw.markStart(n); if (initialTip != null) { rw.markUninteresting(initialTip); } CodeReviewCommit c; final List<CodeReviewCommit> contents = new ArrayList<>(); while ((c = rw.next()) != null) { if (!c.has(canMergeFlag) || !incoming.contains(c)) { if (isAlreadyMerged(c, n.change().getDest())) { rw.markUninteresting(c); } else { // We cannot merge n as it would bring something we // aren't permitted to merge at this time. Drop n. // n.setStatusCode(CommitMergeStatus.MISSING_DEPENDENCY); } // Stop RevWalk because c is either a merged commit or a missing // dependency. Not need to walk further. 
break; } contents.add(c); } if (n.getStatusCode() == CommitMergeStatus.MISSING_DEPENDENCY) { continue; } sort.removeAll(contents); Collections.reverse(contents); sorted.removeAll(contents); sorted.addAll(contents); } return sorted; } private boolean isAlreadyMerged(CodeReviewCommit commit, Branch.NameKey dest) throws IOException { try (CodeReviewRevWalk mirw = CodeReviewCommit.newRevWalk(rw.getObjectReader())) { mirw.reset(); mirw.markStart(commit); // check if the commit is merged in other branches for (RevCommit accepted : alreadyAccepted) { if (mirw.isMergedInto(mirw.parseCommit(accepted), mirw.parseCommit(commit))) { return true; } } // check if the commit associated change is merged in the same branch List<ChangeData> changes = internalChangeQuery.byCommit(commit); for (ChangeData change : changes) { if (change.change().getStatus() == Status.MERGED && change.change().getDest().equals(dest) && !isRework(dest.getParentKey(), commit, change)) { return true; } } return false; } catch (OrmException e) { return false; } } private boolean isRework(Project.NameKey project, RevCommit oldCommit, ChangeData change) throws OrmException, IOException { RevCommit currentCommit = rw.parseCommit(ObjectId.fromString(change.currentPatchSet().getRevision().get())); return ChangeKind.REWORK == changeKindCache.getChangeKind(project, repo, oldCommit, currentCommit); } private static <T> T removeOne(final Collection<T> c) { final Iterator<T> i = c.iterator(); final T r = i.next(); i.remove(); return r; } }
Return exception instead of claiming missing dependency If we encountered an OrmException during change query, we should confuse user by claiming that there is a missing dependency. Also add more debugging logs. Change-Id: Ia70b4a865a514996cb694d17db43b8e89457239f
gerrit-server/src/main/java/com/google/gerrit/server/git/RebaseSorter.java
Return exception instead of claiming missing dependency
<ide><path>errit-server/src/main/java/com/google/gerrit/server/git/RebaseSorter.java <ide> import org.eclipse.jgit.lib.Repository; <ide> import org.eclipse.jgit.revwalk.RevCommit; <ide> import org.eclipse.jgit.revwalk.RevFlag; <add>import org.slf4j.Logger; <add>import org.slf4j.LoggerFactory; <ide> <ide> public class RebaseSorter { <add> private static final Logger log = LoggerFactory.getLogger(RebaseSorter.class); <add> <ide> private final CodeReviewRevWalk rw; <ide> private final RevFlag canMergeFlag; <ide> private final RevCommit initialTip; <ide> // check if the commit is merged in other branches <ide> for (RevCommit accepted : alreadyAccepted) { <ide> if (mirw.isMergedInto(mirw.parseCommit(accepted), mirw.parseCommit(commit))) { <add> log.debug("Dependency {} merged into branch head {}.", commit.getName(), <add> accepted.getName()); <ide> return true; <ide> } <ide> } <ide> if (change.change().getStatus() == Status.MERGED <ide> && change.change().getDest().equals(dest) <ide> && !isRework(dest.getParentKey(), commit, change)) { <add> log.debug("Dependency {} associated with merged change {}.", commit.getName(), <add> change.getId()); <ide> return true; <ide> } <ide> } <ide> return false; <ide> } catch (OrmException e) { <del> return false; <add> throw new IOException(e); <ide> } <ide> } <ide>
Java
epl-1.0
16d2ced4c69f9453c5465da66a91ac10d51833e2
0
vkolotov/smarthome,S0urceror/smarthome,Snickermicker/smarthome,resetnow/smarthome,kgoderis/smarthome,dvanherbergen/smarthome,shry15harsh/smarthome,Mixajlo/smarthome,chrisschauer/smarthome,AchimHentschel/smarthome,kceiw/smarthome,AchimHentschel/smarthome,cdjackson/smarthome,Mixajlo/smarthome,kdavis-mozilla/smarthome,CrackerStealth/smarthome,phxql/smarthome,cdjackson/smarthome,kgoderis/smarthome,kceiw/smarthome,sja/smarthome,adimova/smarthome,plamen-peev/smarthome,Stratehm/smarthome,S0urceror/smarthome,S0urceror/smarthome,sja/smarthome,marinmitev/smarthome,marinmitev/smarthome,S0urceror/smarthome,kgoderis/smarthome,philomatic/smarthome,sja/smarthome,Mixajlo/smarthome,BenediktNiehues/smarthome,chrisschauer/smarthome,adimova/smarthome,marinmitev/smarthome,chrisschauer/smarthome,shry15harsh/smarthome,plamen-peev/smarthome,marinmitev/smarthome,AchimHentschel/smarthome,phxql/smarthome,Snickermicker/smarthome,cdjackson/smarthome,vkolotov/smarthome,chaton78/smarthome,kdavis-mozilla/smarthome,shry15harsh/smarthome,chaton78/smarthome,resetnow/smarthome,philomatic/smarthome,BenediktNiehues/smarthome,kgoderis/smarthome,Mixajlo/smarthome,resetnow/smarthome,AchimHentschel/smarthome,Stratehm/smarthome,kdavis-mozilla/smarthome,sja/smarthome,cdjackson/smarthome,shry15harsh/smarthome,vkolotov/smarthome,Stratehm/smarthome,plamen-peev/smarthome,ArchibaldLeMagnifique/smarthome,resetnow/smarthome,dvanherbergen/smarthome,kdavis-mozilla/smarthome,plamen-peev/smarthome,CrackerStealth/smarthome,kceiw/smarthome,adimova/smarthome,chrisschauer/smarthome,philomatic/smarthome,philomatic/smarthome,neverend92/smarthome,ArchibaldLeMagnifique/smarthome,vkolotov/smarthome,BenediktNiehues/smarthome,neverend92/smarthome,BenediktNiehues/smarthome,CrackerStealth/smarthome,phxql/smarthome,dvanherbergen/smarthome,Snickermicker/smarthome,neverend92/smarthome,chaton78/smarthome,Snickermicker/smarthome,kceiw/smarthome,neverend92/smarthome,phxql/smarthome,chaton78/smarthome,CrackerStealth/smarthome,ArchibaldLeMa
gnifique/smarthome,Stratehm/smarthome,adimova/smarthome,dvanherbergen/smarthome
/** * Copyright (c) 2014-2015 openHAB UG (haftungsbeschraenkt) and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.eclipse.smarthome.io.rest.core.item; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Set; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriInfo; import org.eclipse.smarthome.core.events.EventPublisher; import org.eclipse.smarthome.core.items.ActiveItem; import org.eclipse.smarthome.core.items.GenericItem; import org.eclipse.smarthome.core.items.GroupItem; import org.eclipse.smarthome.core.items.Item; import org.eclipse.smarthome.core.items.ItemFactory; import org.eclipse.smarthome.core.items.ItemNotFoundException; import org.eclipse.smarthome.core.items.ItemRegistry; import org.eclipse.smarthome.core.items.ManagedItemProvider; import org.eclipse.smarthome.core.items.dto.GroupItemDTO; import org.eclipse.smarthome.core.items.events.ItemEventFactory; import org.eclipse.smarthome.core.library.items.RollershutterItem; import org.eclipse.smarthome.core.library.items.SwitchItem; import org.eclipse.smarthome.core.library.types.OnOffType; import org.eclipse.smarthome.core.library.types.UpDownType; 
import org.eclipse.smarthome.core.types.Command; import org.eclipse.smarthome.core.types.State; import org.eclipse.smarthome.core.types.TypeParser; import org.eclipse.smarthome.io.rest.JSONResponse; import org.eclipse.smarthome.io.rest.LocaleUtil; import org.eclipse.smarthome.io.rest.RESTResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Strings; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; /** * <p> * This class acts as a REST resource for items and provides different methods to interact with them, like retrieving * lists of items, sending commands to them or checking a single status. * </p> * * <p> * The typical content types are plain text for status values and XML or JSON(P) for more complex data structures * </p> * * <p> * This resource is registered with the Jersey servlet. * </p> * * @author Kai Kreuzer - Initial contribution and API * @author Dennis Nobel - Added methods for item management * @author Andre Fuechsel - Added tag support * @author Chris Jackson - Added method to write complete item bean * @author Stefan Bußweiler - Migration to new ESH event concept * @author Yordan Zhelev - Added Swagger annotations * @author Jörg Plewe - refactoring, error handling */ @Path(ItemResource.PATH_ITEMS) @Api(value = ItemResource.PATH_ITEMS) public class ItemResource implements RESTResource { private final Logger logger = LoggerFactory.getLogger(ItemResource.class); /** The URI path to this resource */ public static final String PATH_ITEMS = "items"; @Context UriInfo uriInfo; @Context UriInfo localUriInfo; private ItemRegistry itemRegistry; private EventPublisher eventPublisher; private ManagedItemProvider managedItemProvider; private Set<ItemFactory> itemFactories = new HashSet<>(); protected void setItemRegistry(ItemRegistry itemRegistry) { this.itemRegistry 
= itemRegistry; } protected void unsetItemRegistry(ItemRegistry itemRegistry) { this.itemRegistry = null; } protected void setEventPublisher(EventPublisher eventPublisher) { this.eventPublisher = eventPublisher; } protected void unsetEventPublisher(EventPublisher eventPublisher) { this.eventPublisher = null; } protected void setManagedItemProvider(ManagedItemProvider managedItemProvider) { this.managedItemProvider = managedItemProvider; } protected void unsetManagedItemProvider(ManagedItemProvider managedItemProvider) { this.managedItemProvider = null; } protected void addItemFactory(ItemFactory itemFactory) { this.itemFactories.add(itemFactory); } protected void removeItemFactory(ItemFactory itemFactory) { this.itemFactories.remove(itemFactory); } @GET @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Get all available items.", response = EnrichedItemDTO.class, responseContainer = "List") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK") }) public Response getItems(@HeaderParam(HttpHeaders.ACCEPT_LANGUAGE) @ApiParam(value = "language") String language, @QueryParam("type") @ApiParam(value = "item type filter", required = false) String type, @QueryParam("tags") @ApiParam(value = "item tag filter", required = false) String tags, @DefaultValue("false") @QueryParam("recursive") @ApiParam(value = "get member items recursivly", required = false) boolean recursive) { final Locale locale = LocaleUtil.getLocale(language); logger.debug("Received HTTP GET request at '{}'", uriInfo.getPath()); Object responseObject = getItemBeans(type, tags, recursive, locale); return Response.ok(responseObject).build(); } @GET @Path("/{itemname: [a-zA-Z_0-9]*}") @Produces({ MediaType.WILDCARD }) @ApiOperation(value = "Gets a single item.", response = EnrichedItemDTO.class) @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found") }) public Response getItemData(@HeaderParam(HttpHeaders.ACCEPT_LANGUAGE) 
@ApiParam(value = "language") String language, @PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname) { final Locale locale = LocaleUtil.getLocale(language); logger.debug("Received HTTP GET request at '{}'", uriInfo.getPath()); // get item Item item = getItem(itemname); // if it exists if (item != null) { logger.debug("Received HTTP GET request at '{}'.", uriInfo.getPath()); return getItemResponse(Status.OK, item, locale, null); } else { logger.info("Received HTTP GET request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); return getItemNotFoundResponse(itemname); } } /** * * @param itemname * @return */ @GET @Path("/{itemname: [a-zA-Z_0-9]*}/state") @Produces(MediaType.TEXT_PLAIN) @ApiOperation(value = "Gets the state of an item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found") }) public Response getPlainItemState( @PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname) { // get item Item item = getItem(itemname); // if it exists if (item != null) { logger.debug("Received HTTP GET request at '{}'.", uriInfo.getPath()); // we cannot use JSONResponse.createResponse() bc. 
MediaType.TEXT_PLAIN // return JSONResponse.createResponse(Status.OK, item.getState().toString(), null); return Response.ok(item.getState().toString()).build(); } else { logger.info("Received HTTP GET request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); return getItemNotFoundResponse(itemname); } } @PUT @Path("/{itemname: [a-zA-Z_0-9]*}/state") @Consumes(MediaType.TEXT_PLAIN) @ApiOperation(value = "Updates the state of an item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found"), @ApiResponse(code = 400, message = "Item state null") }) public Response putItemState( @HeaderParam(HttpHeaders.ACCEPT_LANGUAGE) @ApiParam(value = "language") String language, @PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname, @ApiParam(value = "valid item state (e.g. ON, OFF)", required = true) String value) { final Locale locale = LocaleUtil.getLocale(language); // get Item Item item = getItem(itemname); // if Item exists if (item != null) { // try to parse a State from the input State state = TypeParser.parseState(item.getAcceptedDataTypes(), value); if (state != null) { // set State and report OK logger.debug("Received HTTP PUT request at '{}' with value '{}'.", uriInfo.getPath(), value); eventPublisher.post(ItemEventFactory.createStateEvent(itemname, state)); return getItemResponse(Status.ACCEPTED, null, locale, null); } else { // State could not be parsed logger.warn("Received HTTP PUT request at '{}' with an invalid status value '{}'.", uriInfo.getPath(), value); return JSONResponse.createErrorResponse(Status.BAD_REQUEST, "State could not be parsed: " + value); } } else { // Item does not exist logger.info("Received HTTP PUT request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); return getItemNotFoundResponse(itemname); } } @POST @Path("/{itemname: [a-zA-Z_0-9]*}") @Consumes(MediaType.TEXT_PLAIN) @ApiOperation(value = "Sends a command to 
an item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found"), @ApiResponse(code = 400, message = "Item command null") }) public Response postItemCommand( @PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname, @ApiParam(value = "valid item command (e.g. ON, OFF, UP, DOWN, REFRESH)", required = true) String value) { Item item = getItem(itemname); Command command = null; if (item != null) { if ("toggle".equalsIgnoreCase(value) && (item instanceof SwitchItem || item instanceof RollershutterItem)) { if (OnOffType.ON.equals(item.getStateAs(OnOffType.class))) { command = OnOffType.OFF; } if (OnOffType.OFF.equals(item.getStateAs(OnOffType.class))) { command = OnOffType.ON; } if (UpDownType.UP.equals(item.getStateAs(UpDownType.class))) { command = UpDownType.DOWN; } if (UpDownType.DOWN.equals(item.getStateAs(UpDownType.class))) { command = UpDownType.UP; } } else { command = TypeParser.parseCommand(item.getAcceptedCommandTypes(), value); } if (command != null) { logger.debug("Received HTTP POST request at '{}' with value '{}'.", uriInfo.getPath(), value); eventPublisher.post(ItemEventFactory.createCommandEvent(itemname, command)); ResponseBuilder resbuilder = Response.ok(); resbuilder.type(MediaType.TEXT_PLAIN); return resbuilder.build(); } else { logger.warn("Received HTTP POST request at '{}' with an invalid status value '{}'.", uriInfo.getPath(), value); return Response.status(Status.BAD_REQUEST).build(); } } else { logger.info("Received HTTP POST request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); throw new WebApplicationException(404); } } @PUT @Path("/{itemName: [a-zA-Z_0-9]*}/members/{memberItemName: [a-zA-Z_0-9]*}") @ApiOperation(value = "Adds a new member to a group item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item or member item not found or item is not of type group item."), 
@ApiResponse(code = 405, message = "Member item is not editable.") }) public Response addMember(@PathParam("itemName") @ApiParam(value = "item name", required = true) String itemName, @PathParam("memberItemName") @ApiParam(value = "member item name", required = true) String memberItemName) { try { Item item = itemRegistry.getItem(itemName); if (!(item instanceof GroupItem)) { return Response.status(Status.NOT_FOUND).build(); } GroupItem groupItem = (GroupItem) item; Item memberItem = itemRegistry.getItem(memberItemName); if (!(memberItem instanceof GenericItem)) { return Response.status(Status.NOT_FOUND).build(); } if (managedItemProvider.get(memberItemName) == null) { return Response.status(Status.METHOD_NOT_ALLOWED).build(); } GenericItem genericMemberItem = (GenericItem) memberItem; genericMemberItem.addGroupName(groupItem.getName()); managedItemProvider.update(genericMemberItem); return Response.ok().build(); } catch (ItemNotFoundException e) { return Response.status(Status.NOT_FOUND).build(); } } @DELETE @Path("/{itemName: [a-zA-Z_0-9]*}/members/{memberItemName: [a-zA-Z_0-9]*}") @ApiOperation(value = "Removes an existing member from a group item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item or member item not found or item is not of type group item."), @ApiResponse(code = 405, message = "Member item is not editable.") }) public Response removeMember(@PathParam("itemName") @ApiParam(value = "item name", required = true) String itemName, @PathParam("memberItemName") @ApiParam(value = "member item name", required = true) String memberItemName) { try { Item item = itemRegistry.getItem(itemName); if (!(item instanceof GroupItem)) { return Response.status(Status.NOT_FOUND).build(); } GroupItem groupItem = (GroupItem) item; Item memberItem = itemRegistry.getItem(memberItemName); if (!(memberItem instanceof GenericItem)) { return Response.status(Status.NOT_FOUND).build(); } if 
(managedItemProvider.get(memberItemName) == null) { return Response.status(Status.METHOD_NOT_ALLOWED).build(); } GenericItem genericMemberItem = (GenericItem) memberItem; genericMemberItem.removeGroupName(groupItem.getName()); managedItemProvider.update(genericMemberItem); return Response.ok().build(); } catch (ItemNotFoundException e) { return Response.status(Status.NOT_FOUND).build(); } } @DELETE @Path("/{itemname: [a-zA-Z_0-9]*}") @ApiOperation(value = "Removes an item from the registry.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found or item is not editable.") }) public Response removeItem(@PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname) { if (managedItemProvider.remove(itemname) == null) { logger.info("Received HTTP DELETE request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); return Response.status(Status.NOT_FOUND).build(); } return Response.ok().build(); } @PUT @Path("/{itemname: [a-zA-Z_0-9]*}/tags/{tag}") @ApiOperation(value = "Adds a tag to an item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found."), @ApiResponse(code = 405, message = "Item not editable.") }) public Response addTag(@PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname, @PathParam("tag") @ApiParam(value = "tag", required = true) String tag) { Item item = getItem(itemname); if (item == null) { logger.info("Received HTTP PUT request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); return Response.status(Status.NOT_FOUND).build(); } if (managedItemProvider.get(itemname) == null) { return Response.status(Status.METHOD_NOT_ALLOWED).build(); } ((ActiveItem) item).addTag(tag); managedItemProvider.update(item); return Response.ok().build(); } @DELETE @Path("/{itemname: [a-zA-Z_0-9]*}/tags/{tag}") @ApiOperation(value = "Removes a tag from an item.") 
@ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found."), @ApiResponse(code = 405, message = "Item not editable.") }) public Response removeTag(@PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname, @PathParam("tag") @ApiParam(value = "tag", required = true) String tag) { Item item = getItem(itemname); if (item == null) { logger.info("Received HTTP DELETE request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); return Response.status(Status.NOT_FOUND).build(); } if (managedItemProvider.get(itemname) == null) { return Response.status(Status.METHOD_NOT_ALLOWED).build(); } ((ActiveItem) item).removeTag(tag); managedItemProvider.update(item); return Response.ok().build(); } /** * Create or Update an item by supplying an item bean. * * @param itemname * @param item the item bean. * @return */ @PUT @Path("/{itemname: [a-zA-Z_0-9]*}") @Consumes(MediaType.APPLICATION_JSON) @ApiOperation(value = "Adds a new item to the registry or updates the existing item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 201, message = "Item created."), @ApiResponse(code = 400, message = "Item null."), @ApiResponse(code = 404, message = "Item not found."), @ApiResponse(code = 405, message = "Item not editable.") }) public Response createOrUpdateItem( @HeaderParam(HttpHeaders.ACCEPT_LANGUAGE) @ApiParam(value = "language") String language, @PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname, @ApiParam(value = "item data", required = true) GroupItemDTO item) { final Locale locale = LocaleUtil.getLocale(language); // If we didn't get an item bean, then return! 
if (item == null) { return Response.status(Status.BAD_REQUEST).build(); } GenericItem newItem = null; if (item.type != null && item.type.equals("GroupItem")) { GenericItem baseItem = null; if (!Strings.isNullOrEmpty(item.groupType)) { baseItem = createItem(item.groupType, itemname); } newItem = new GroupItem(itemname, baseItem); } else { String itemType = item.type.substring(0, item.type.length() - 4); newItem = createItem(itemType, itemname); } if (newItem == null) { logger.warn("Received HTTP PUT request at '{}' with an invalid item type '{}'.", uriInfo.getPath(), item.type); return Response.status(Status.BAD_REQUEST).build(); } // See if an existing item of this name exists. Item existingItem = getItem(itemname); // Update the label newItem.setLabel(item.label); if (item.category != null) { newItem.setCategory(item.category); } if (item.groupNames != null) { newItem.addGroupNames(item.groupNames); } if (item.tags != null) { newItem.addTags(item.tags); } // Save the item if (existingItem == null) { // item does not yet exist, create it managedItemProvider.add(newItem); return getItemResponse(Status.CREATED, newItem, locale, null); } else if (managedItemProvider.get(itemname) != null) { // item already exists as a managed item, update it managedItemProvider.update(newItem); return getItemResponse(Status.OK, newItem, locale, null); } else { // Item exists but cannot be updated logger.warn("Cannot update existing item '{}', because is not managed.", itemname); return JSONResponse.createErrorResponse(Status.METHOD_NOT_ALLOWED, "Cannot update non-managed Item " + itemname); } } /** * helper: Create new item with name and type * * @param itemType type of the item * @param itemname name of the item * @return the newly created item */ private GenericItem createItem(String itemType, String itemname) { GenericItem newItem = null; for (ItemFactory itemFactory : itemFactories) { newItem = itemFactory.createItem(itemType, itemname); if (newItem != null) { break; } } return 
newItem; } /** * helper: Response to be sent to client if a Thing cannot be found * * @param thingUID * @return Response configured for 'item not found' */ private static Response getItemNotFoundResponse(String itemname) { String message = "Item " + itemname + " does not exist!"; return JSONResponse.createResponse(Status.NOT_FOUND, null, message); } /** * Prepare a response representing the Item depending in the status. * * @param status * @param item can be null * @param locale the locale * @param errormessage optional message in case of error * @return Response configured to represent the Item in depending on the status */ private Response getItemResponse(Status status, Item item, Locale locale, String errormessage) { Object entity = null != item ? EnrichedItemDTOMapper.map(item, true, uriInfo.getBaseUri(), locale) : null; return JSONResponse.createResponse(status, entity, errormessage); } /** * convenience shortcut * * @param itemname * @return Item addressed by itemname */ private Item getItem(String itemname) { Item item = itemRegistry.get(itemname); return item; } private List<EnrichedItemDTO> getItemBeans(String type, String tags, boolean recursive, Locale locale) { List<EnrichedItemDTO> beans = new LinkedList<>(); Collection<Item> items; if (tags == null) { if (type == null) { items = itemRegistry.getItems(); } else { items = itemRegistry.getItemsOfType(type); } } else { String[] tagList = tags.split(","); if (type == null) { items = itemRegistry.getItemsByTag(tagList); } else { items = itemRegistry.getItemsByTagAndType(type, tagList); } } if (items != null) { for (Item item : items) { beans.add(EnrichedItemDTOMapper.map(item, recursive, uriInfo.getBaseUri(), locale)); } } return beans; } }
bundles/io/org.eclipse.smarthome.io.rest.core/src/main/java/org/eclipse/smarthome/io/rest/core/item/ItemResource.java
/** * Copyright (c) 2014-2015 openHAB UG (haftungsbeschraenkt) and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html */ package org.eclipse.smarthome.io.rest.core.item; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Set; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.HeaderParam; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriInfo; import org.eclipse.smarthome.core.events.EventPublisher; import org.eclipse.smarthome.core.items.ActiveItem; import org.eclipse.smarthome.core.items.GenericItem; import org.eclipse.smarthome.core.items.GroupItem; import org.eclipse.smarthome.core.items.Item; import org.eclipse.smarthome.core.items.ItemFactory; import org.eclipse.smarthome.core.items.ItemNotFoundException; import org.eclipse.smarthome.core.items.ItemRegistry; import org.eclipse.smarthome.core.items.ManagedItemProvider; import org.eclipse.smarthome.core.items.dto.GroupItemDTO; import org.eclipse.smarthome.core.items.events.ItemEventFactory; import org.eclipse.smarthome.core.library.items.RollershutterItem; import org.eclipse.smarthome.core.library.items.SwitchItem; import org.eclipse.smarthome.core.library.types.OnOffType; import org.eclipse.smarthome.core.library.types.UpDownType; 
import org.eclipse.smarthome.core.types.Command; import org.eclipse.smarthome.core.types.State; import org.eclipse.smarthome.core.types.TypeParser; import org.eclipse.smarthome.io.rest.JSONResponse; import org.eclipse.smarthome.io.rest.LocaleUtil; import org.eclipse.smarthome.io.rest.RESTResource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Strings; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; /** * <p> * This class acts as a REST resource for items and provides different methods to interact with them, like retrieving * lists of items, sending commands to them or checking a single status. * </p> * * <p> * The typical content types are plain text for status values and XML or JSON(P) for more complex data structures * </p> * * <p> * This resource is registered with the Jersey servlet. * </p> * * @author Kai Kreuzer - Initial contribution and API * @author Dennis Nobel - Added methods for item management * @author Andre Fuechsel - Added tag support * @author Chris Jackson - Added method to write complete item bean * @author Stefan Bußweiler - Migration to new ESH event concept * @author Yordan Zhelev - Added Swagger annotations * @author Jörg Plewe - refactoring, error handling */ @Path(ItemResource.PATH_ITEMS) @Api(value = ItemResource.PATH_ITEMS) public class ItemResource implements RESTResource { private final Logger logger = LoggerFactory.getLogger(ItemResource.class); /** The URI path to this resource */ public static final String PATH_ITEMS = "items"; @Context UriInfo uriInfo; @Context UriInfo localUriInfo; private ItemRegistry itemRegistry; private EventPublisher eventPublisher; private ManagedItemProvider managedItemProvider; private Set<ItemFactory> itemFactories = new HashSet<>(); protected void setItemRegistry(ItemRegistry itemRegistry) { this.itemRegistry 
= itemRegistry; } protected void unsetItemRegistry(ItemRegistry itemRegistry) { this.itemRegistry = null; } protected void setEventPublisher(EventPublisher eventPublisher) { this.eventPublisher = eventPublisher; } protected void unsetEventPublisher(EventPublisher eventPublisher) { this.eventPublisher = null; } protected void setManagedItemProvider(ManagedItemProvider managedItemProvider) { this.managedItemProvider = managedItemProvider; } protected void unsetManagedItemProvider(ManagedItemProvider managedItemProvider) { this.managedItemProvider = null; } protected void addItemFactory(ItemFactory itemFactory) { this.itemFactories.add(itemFactory); } protected void removeItemFactory(ItemFactory itemFactory) { this.itemFactories.remove(itemFactory); } @GET @Produces(MediaType.APPLICATION_JSON) @ApiOperation(value = "Get all available items.", response = EnrichedItemDTO.class, responseContainer = "List") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK") }) public Response getItems(@HeaderParam(HttpHeaders.ACCEPT_LANGUAGE) @ApiParam(value = "language") String language, @QueryParam("type") @ApiParam(value = "item type filter", required = false) String type, @QueryParam("tags") @ApiParam(value = "item tag filter", required = false) String tags, @DefaultValue("false") @QueryParam("recursive") @ApiParam(value = "get member items recursivly", required = false) boolean recursive) { final Locale locale = LocaleUtil.getLocale(language); logger.debug("Received HTTP GET request at '{}'", uriInfo.getPath()); Object responseObject = getItemBeans(type, tags, recursive, locale); return Response.ok(responseObject).build(); } @GET @Path("/{itemname: [a-zA-Z_0-9]*}") @Produces({ MediaType.WILDCARD }) @ApiOperation(value = "Gets a single item.", response = EnrichedItemDTO.class) @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found") }) public Response getItemData(@HeaderParam(HttpHeaders.ACCEPT_LANGUAGE) 
@ApiParam(value = "language") String language, @PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname) { final Locale locale = LocaleUtil.getLocale(language); logger.debug("Received HTTP GET request at '{}'", uriInfo.getPath()); // get item Item item = getItem(itemname); // if it exists if (item != null) { logger.debug("Received HTTP GET request at '{}'.", uriInfo.getPath()); return getItemResponse(Status.OK, item, locale, null); } else { logger.info("Received HTTP GET request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); return getItemNotFoundResponse(itemname); } } /** * * @param itemname * @return */ @GET @Path("/{itemname: [a-zA-Z_0-9]*}/state") @Produces(MediaType.TEXT_PLAIN) @ApiOperation(value = "Gets the state of an item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found") }) public Response getPlainItemState( @PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname) { // get item Item item = getItem(itemname); // if it exists if (item != null) { logger.debug("Received HTTP GET request at '{}'.", uriInfo.getPath()); // we cannot use JSONResponse.createResponse() bc. 
MediaType.TEXT_PLAIN // return JSONResponse.createResponse(Status.OK, item.getState().toString(), null); return Response.ok(item.getState().toString()).build(); } else { logger.info("Received HTTP GET request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); return getItemNotFoundResponse(itemname); } } @PUT @Path("/{itemname: [a-zA-Z_0-9]*}/state") @Consumes(MediaType.TEXT_PLAIN) @ApiOperation(value = "Updates the state of an item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found"), @ApiResponse(code = 400, message = "Item state null") }) public Response putItemState( @HeaderParam(HttpHeaders.ACCEPT_LANGUAGE) @ApiParam(value = "language") String language, @PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname, @ApiParam(value = "valid item state (e.g. ON, OFF)", required = true) String value) { final Locale locale = LocaleUtil.getLocale(language); // get Item Item item = getItem(itemname); // if Item exists if (item != null) { // try to parse a State from the input State state = TypeParser.parseState(item.getAcceptedDataTypes(), value); if (state != null) { // set State and report OK logger.debug("Received HTTP PUT request at '{}' with value '{}'.", uriInfo.getPath(), value); eventPublisher.post(ItemEventFactory.createStateEvent(itemname, state)); return getItemResponse(Status.ACCEPTED, null, locale, null); } else { // State could not be parsed logger.warn("Received HTTP PUT request at '{}' with an invalid status value '{}'.", uriInfo.getPath(), value); return JSONResponse.createErrorResponse(Status.BAD_REQUEST, "State could not be parsed: " + value); } } else { // Item does not exist logger.info("Received HTTP PUT request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); return getItemNotFoundResponse(itemname); } } @POST @Path("/{itemname: [a-zA-Z_0-9]*}") @Consumes(MediaType.TEXT_PLAIN) @ApiOperation(value = "Sends a command to 
an item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found"), @ApiResponse(code = 400, message = "Item command null") }) public Response postItemCommand( @PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname, @ApiParam(value = "valid item command (e.g. ON, OFF, UP, DOWN, REFRESH)", required = true) String value) { Item item = getItem(itemname); Command command = null; if (item != null) { if ("toggle".equalsIgnoreCase(value) && (item instanceof SwitchItem || item instanceof RollershutterItem)) { if (OnOffType.ON.equals(item.getStateAs(OnOffType.class))) { command = OnOffType.OFF; } if (OnOffType.OFF.equals(item.getStateAs(OnOffType.class))) { command = OnOffType.ON; } if (UpDownType.UP.equals(item.getStateAs(UpDownType.class))) { command = UpDownType.DOWN; } if (UpDownType.DOWN.equals(item.getStateAs(UpDownType.class))) { command = UpDownType.UP; } } else { command = TypeParser.parseCommand(item.getAcceptedCommandTypes(), value); } if (command != null) { logger.debug("Received HTTP POST request at '{}' with value '{}'.", uriInfo.getPath(), value); eventPublisher.post(ItemEventFactory.createCommandEvent(itemname, command)); ResponseBuilder resbuilder = Response.ok(); resbuilder.type(MediaType.TEXT_PLAIN); return resbuilder.build(); } else { logger.warn("Received HTTP POST request at '{}' with an invalid status value '{}'.", uriInfo.getPath(), value); return Response.status(Status.BAD_REQUEST).build(); } } else { logger.info("Received HTTP POST request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); throw new WebApplicationException(404); } } @PUT @Path("/{itemName: [a-zA-Z_0-9]*}/members/{memberItemName: [a-zA-Z_0-9]*}") @ApiOperation(value = "Adds a new member to a group item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item or member item not found or item is not of type group item."), 
@ApiResponse(code = 405, message = "Member item is not editable.") }) public Response addMember(@PathParam("itemName") @ApiParam(value = "item name", required = true) String itemName, @PathParam("memberItemName") @ApiParam(value = "member item name", required = true) String memberItemName) { try { Item item = itemRegistry.getItem(itemName); if (!(item instanceof GroupItem)) { return Response.status(Status.NOT_FOUND).build(); } GroupItem groupItem = (GroupItem) item; Item memberItem = itemRegistry.getItem(memberItemName); if (!(memberItem instanceof GenericItem)) { return Response.status(Status.NOT_FOUND).build(); } if (managedItemProvider.get(memberItemName) == null) { return Response.status(Status.METHOD_NOT_ALLOWED).build(); } GenericItem genericMemberItem = (GenericItem) memberItem; genericMemberItem.addGroupName(groupItem.getName()); managedItemProvider.update(genericMemberItem); return Response.ok().build(); } catch (ItemNotFoundException e) { return Response.status(Status.NOT_FOUND).build(); } } @DELETE @Path("/{itemName: [a-zA-Z_0-9]*}/members/{memberItemName: [a-zA-Z_0-9]*}") @ApiOperation(value = "Removes an existing member from a group item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item or member item not found or item is not of type group item."), @ApiResponse(code = 405, message = "Member item is not editable.") }) public Response removeMember(@PathParam("itemName") @ApiParam(value = "item name", required = true) String itemName, @PathParam("memberItemName") @ApiParam(value = "member item name", required = true) String memberItemName) { try { Item item = itemRegistry.getItem(itemName); if (!(item instanceof GroupItem)) { return Response.status(Status.NOT_FOUND).build(); } GroupItem groupItem = (GroupItem) item; Item memberItem = itemRegistry.getItem(memberItemName); if (!(memberItem instanceof GenericItem)) { return Response.status(Status.NOT_FOUND).build(); } if 
(managedItemProvider.get(memberItemName) == null) { return Response.status(Status.METHOD_NOT_ALLOWED).build(); } GenericItem genericMemberItem = (GenericItem) memberItem; genericMemberItem.removeGroupName(groupItem.getName()); managedItemProvider.update(genericMemberItem); return Response.ok().build(); } catch (ItemNotFoundException e) { return Response.status(Status.NOT_FOUND).build(); } } @DELETE @Path("/{itemname: [a-zA-Z_0-9]*}") @ApiOperation(value = "Removes an item from the registry.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found or item is not editable.") }) public Response removeItem(@PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname) { if (managedItemProvider.remove(itemname) == null) { logger.info("Received HTTP DELETE request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); return Response.status(Status.NOT_FOUND).build(); } return Response.ok().build(); } @PUT @Path("/{itemname: [a-zA-Z_0-9]*}/tags/{tag}") @ApiOperation(value = "Adds a tag to an item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found."), @ApiResponse(code = 405, message = "Item not editable.") }) public Response addTag(@PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname, @PathParam("tag") @ApiParam(value = "tag", required = true) String tag) { Item item = getItem(itemname); if (item == null) { logger.info("Received HTTP PUT request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); return Response.status(Status.NOT_FOUND).build(); } if (managedItemProvider.get(itemname) == null) { return Response.status(Status.METHOD_NOT_ALLOWED).build(); } ((ActiveItem) item).addTag(tag); managedItemProvider.update(item); return Response.ok().build(); } @DELETE @Path("/{itemname: [a-zA-Z_0-9]*}/tags/{tag: [a-zA-Z_0-9]*}") @ApiOperation(value = "Removes a tag 
from an item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 404, message = "Item not found."), @ApiResponse(code = 405, message = "Item not editable.") }) public Response removeTag(@PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname, @PathParam("tag") @ApiParam(value = "tag", required = true) String tag) { Item item = getItem(itemname); if (item == null) { logger.info("Received HTTP DELETE request at '{}' for the unknown item '{}'.", uriInfo.getPath(), itemname); return Response.status(Status.NOT_FOUND).build(); } if (managedItemProvider.get(itemname) == null) { return Response.status(Status.METHOD_NOT_ALLOWED).build(); } ((ActiveItem) item).removeTag(tag); managedItemProvider.update(item); return Response.ok().build(); } /** * Create or Update an item by supplying an item bean. * * @param itemname * @param item the item bean. * @return */ @PUT @Path("/{itemname: [a-zA-Z_0-9]*}") @Consumes(MediaType.APPLICATION_JSON) @ApiOperation(value = "Adds a new item to the registry or updates the existing item.") @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), @ApiResponse(code = 201, message = "Item created."), @ApiResponse(code = 400, message = "Item null."), @ApiResponse(code = 404, message = "Item not found."), @ApiResponse(code = 405, message = "Item not editable.") }) public Response createOrUpdateItem( @HeaderParam(HttpHeaders.ACCEPT_LANGUAGE) @ApiParam(value = "language") String language, @PathParam("itemname") @ApiParam(value = "item name", required = true) String itemname, @ApiParam(value = "item data", required = true) GroupItemDTO item) { final Locale locale = LocaleUtil.getLocale(language); // If we didn't get an item bean, then return! 
if (item == null) { return Response.status(Status.BAD_REQUEST).build(); } GenericItem newItem = null; if (item.type != null && item.type.equals("GroupItem")) { GenericItem baseItem = null; if (!Strings.isNullOrEmpty(item.groupType)) { baseItem = createItem(item.groupType, itemname); } newItem = new GroupItem(itemname, baseItem); } else { String itemType = item.type.substring(0, item.type.length() - 4); newItem = createItem(itemType, itemname); } if (newItem == null) { logger.warn("Received HTTP PUT request at '{}' with an invalid item type '{}'.", uriInfo.getPath(), item.type); return Response.status(Status.BAD_REQUEST).build(); } // See if an existing item of this name exists. Item existingItem = getItem(itemname); // Update the label newItem.setLabel(item.label); if (item.category != null) { newItem.setCategory(item.category); } if (item.groupNames != null) { newItem.addGroupNames(item.groupNames); } if (item.tags != null) { newItem.addTags(item.tags); } // Save the item if (existingItem == null) { // item does not yet exist, create it managedItemProvider.add(newItem); return getItemResponse(Status.CREATED, newItem, locale, null); } else if (managedItemProvider.get(itemname) != null) { // item already exists as a managed item, update it managedItemProvider.update(newItem); return getItemResponse(Status.OK, newItem, locale, null); } else { // Item exists but cannot be updated logger.warn("Cannot update existing item '{}', because is not managed.", itemname); return JSONResponse.createErrorResponse(Status.METHOD_NOT_ALLOWED, "Cannot update non-managed Item " + itemname); } } /** * helper: Create new item with name and type * * @param itemType type of the item * @param itemname name of the item * @return the newly created item */ private GenericItem createItem(String itemType, String itemname) { GenericItem newItem = null; for (ItemFactory itemFactory : itemFactories) { newItem = itemFactory.createItem(itemType, itemname); if (newItem != null) { break; } } return 
newItem; } /** * helper: Response to be sent to client if a Thing cannot be found * * @param thingUID * @return Response configured for 'item not found' */ private static Response getItemNotFoundResponse(String itemname) { String message = "Item " + itemname + " does not exist!"; return JSONResponse.createResponse(Status.NOT_FOUND, null, message); } /** * Prepare a response representing the Item depending in the status. * * @param status * @param item can be null * @param locale the locale * @param errormessage optional message in case of error * @return Response configured to represent the Item in depending on the status */ private Response getItemResponse(Status status, Item item, Locale locale, String errormessage) { Object entity = null != item ? EnrichedItemDTOMapper.map(item, true, uriInfo.getBaseUri(), locale) : null; return JSONResponse.createResponse(status, entity, errormessage); } /** * convenience shortcut * * @param itemname * @return Item addressed by itemname */ private Item getItem(String itemname) { Item item = itemRegistry.get(itemname); return item; } private List<EnrichedItemDTO> getItemBeans(String type, String tags, boolean recursive, Locale locale) { List<EnrichedItemDTO> beans = new LinkedList<>(); Collection<Item> items; if (tags == null) { if (type == null) { items = itemRegistry.getItems(); } else { items = itemRegistry.getItemsOfType(type); } } else { String[] tagList = tags.split(","); if (type == null) { items = itemRegistry.getItemsByTag(tagList); } else { items = itemRegistry.getItemsByTagAndType(type, tagList); } } if (items != null) { for (Item item : items) { beans.add(EnrichedItemDTOMapper.map(item, recursive, uriInfo.getBaseUri(), locale)); } } return beans; } }
allow colon in tag names on DELETE (#1399) fixes https://github.com/eclipse/smarthome/issues/1381 PR: https://github.com/eclipse/smarthome/pull/1399 Signed-off-by: Kai Kreuzer <[email protected]>
bundles/io/org.eclipse.smarthome.io.rest.core/src/main/java/org/eclipse/smarthome/io/rest/core/item/ItemResource.java
allow colon in tag names on DELETE (#1399)
<ide><path>undles/io/org.eclipse.smarthome.io.rest.core/src/main/java/org/eclipse/smarthome/io/rest/core/item/ItemResource.java <ide> } <ide> <ide> @DELETE <del> @Path("/{itemname: [a-zA-Z_0-9]*}/tags/{tag: [a-zA-Z_0-9]*}") <add> @Path("/{itemname: [a-zA-Z_0-9]*}/tags/{tag}") <ide> @ApiOperation(value = "Removes a tag from an item.") <ide> @ApiResponses(value = { @ApiResponse(code = 200, message = "OK"), <ide> @ApiResponse(code = 404, message = "Item not found."),
Java
apache-2.0
784e09428d39ea550cdb16531d12373311494e15
0
gradle/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle
/* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.tasks.diagnostics; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import org.apache.commons.lang.StringUtils; import org.gradle.api.DefaultTask; import org.gradle.api.Incubating; import org.gradle.api.InvalidUserDataException; import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.result.DependencyResult; import org.gradle.api.artifacts.result.ResolutionResult; import org.gradle.api.artifacts.result.ResolvedVariantResult; import org.gradle.api.attributes.Attribute; import org.gradle.api.attributes.AttributeContainer; import org.gradle.api.attributes.HasAttributes; import org.gradle.api.internal.artifacts.configurations.ResolvableDependenciesInternal; import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.strategy.VersionComparator; import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.strategy.VersionParser; import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.strategy.VersionSelectorScheme; import org.gradle.api.internal.attributes.AttributeContainerInternal; import org.gradle.api.internal.attributes.AttributesSchemaInternal; import org.gradle.api.internal.attributes.ImmutableAttributesFactory; import org.gradle.api.internal.model.NamedObjectInstantiator; import org.gradle.api.provider.Property; import org.gradle.api.specs.Spec; import org.gradle.api.tasks.Internal; 
import org.gradle.api.tasks.TaskAction; import org.gradle.api.tasks.diagnostics.internal.ConfigurationFinder; import org.gradle.api.tasks.diagnostics.internal.dependencies.AttributeMatchDetails; import org.gradle.api.tasks.diagnostics.internal.dependencies.MatchType; import org.gradle.api.tasks.diagnostics.internal.dsl.DependencyResultSpecNotationConverter; import org.gradle.api.tasks.diagnostics.internal.graph.DependencyGraphsRenderer; import org.gradle.api.tasks.diagnostics.internal.graph.NodeRenderer; import org.gradle.api.tasks.diagnostics.internal.graph.nodes.RenderableDependency; import org.gradle.api.tasks.diagnostics.internal.graph.nodes.Section; import org.gradle.api.tasks.diagnostics.internal.insight.DependencyInsightReporter; import org.gradle.api.tasks.diagnostics.internal.text.StyledTable; import org.gradle.api.tasks.options.Option; import org.gradle.initialization.StartParameterBuildOptions; import org.gradle.internal.component.model.AttributeMatcher; import org.gradle.internal.graph.GraphRenderer; import org.gradle.internal.logging.text.StyledTextOutput; import org.gradle.internal.logging.text.StyledTextOutputFactory; import org.gradle.internal.snapshot.impl.CoercingStringValueSnapshot; import org.gradle.internal.typeconversion.NotationParser; import org.gradle.work.DisableCachingByDefault; import javax.annotation.Nullable; import javax.inject.Inject; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.function.Predicate; import java.util.stream.Collectors; import static org.gradle.internal.logging.text.StyledTextOutput.Style.AlternativeSuccess; import static org.gradle.internal.logging.text.StyledTextOutput.Style.Description; import static org.gradle.internal.logging.text.StyledTextOutput.Style.Failure; import static 
org.gradle.internal.logging.text.StyledTextOutput.Style.Header; import static org.gradle.internal.logging.text.StyledTextOutput.Style.Identifier; import static org.gradle.internal.logging.text.StyledTextOutput.Style.Info; import static org.gradle.internal.logging.text.StyledTextOutput.Style.Normal; import static org.gradle.internal.logging.text.StyledTextOutput.Style.Success; import static org.gradle.internal.logging.text.StyledTextOutput.Style.UserInput; /** * Generates a report that attempts to answer questions like: * <ul> * <li>Why is this dependency in the dependency graph?</li> * <li>Exactly which dependencies are pulling this dependency into the graph?</li> * <li>What is the actual version (i.e. *selected* version) of the dependency that will be used? Is it the same as what was *requested*?</li> * <li>Why is the *selected* version of a dependency different to the *requested*?</li> * </ul> * * Use this task to get insight into a particular dependency (or dependencies) * and find out what exactly happens during dependency resolution and conflict resolution. * If the dependency version was forced or selected by the conflict resolution * this information will be available in the report. * <p> * While the regular dependencies report ({@link DependencyReportTask}) shows the path from the top level dependencies down through the transitive dependencies, * the dependency insight report shows the path from a particular dependency to the dependencies that pulled it in. * That is, it is an inverted view of the regular dependencies report. * <p> * The task requires setting the dependency spec and the configuration. * For more information on how to configure those please refer to docs for * {@link DependencyInsightReportTask#setDependencySpec(Object)} and * {@link DependencyInsightReportTask#setConfiguration(String)}. * <p> * The task can also be configured from the command line. 
* For more information please refer to {@link DependencyInsightReportTask#setDependencySpec(Object)} * and {@link DependencyInsightReportTask#setConfiguration(String)} */ @DisableCachingByDefault(because = "Produces only non-cacheable console output") public class DependencyInsightReportTask extends DefaultTask { private final NamedObjectInstantiator namedObjectInstantiator; private Configuration configuration; private Spec<DependencyResult> dependencySpec; private boolean showSinglePathToDependency; private final Property<Boolean> showingAllVariants = getProject().getObjects().property(Boolean.class); @Inject public DependencyInsightReportTask(NamedObjectInstantiator namedObjectInstantiator) { this.namedObjectInstantiator = namedObjectInstantiator; } /** * Selects the dependency (or dependencies if multiple matches found) to show the report for. */ @Internal public Spec<DependencyResult> getDependencySpec() { return dependencySpec; } /** * The dependency spec selects the dependency (or dependencies if multiple matches found) to show the report for. The spec receives an instance of {@link DependencyResult} as parameter. */ public void setDependencySpec(Spec<DependencyResult> dependencySpec) { this.dependencySpec = dependencySpec; } /** * Configures the dependency to show the report for. * Multiple notation formats are supported: Strings, instances of {@link Spec} * and groovy closures. Spec and closure receive {@link DependencyResult} as parameter. * Examples of String notation: 'org.slf4j:slf4j-api', 'slf4j-api', or simply: 'slf4j'. * The input may potentially match multiple dependencies. * See also {@link DependencyInsightReportTask#setDependencySpec(Spec)} * <p> * This method is exposed to the command line interface. 
Example usage: * <pre>gradle dependencyInsight --dependency slf4j</pre> */ @Option(option = "dependency", description = "Shows the details of given dependency.") public void setDependencySpec(Object dependencyInsightNotation) { NotationParser<Object, Spec<DependencyResult>> parser = DependencyResultSpecNotationConverter.parser(); this.dependencySpec = parser.parseNotation(dependencyInsightNotation); } /** * Configuration to look the dependency in */ @Internal public Configuration getConfiguration() { return configuration; } /** * Sets the configuration to look the dependency in. */ public void setConfiguration(Configuration configuration) { this.configuration = configuration; } /** * Sets the configuration (via name) to look the dependency in. * <p> * This method is exposed to the command line interface. Example usage: * <pre>gradle dependencyInsight --configuration runtime --dependency slf4j</pre> */ @Option(option = "configuration", description = "Looks for the dependency in given configuration.") public void setConfiguration(String configurationName) { this.configuration = ConfigurationFinder.find(getProject().getConfigurations(), configurationName); } /** * Tells if the report should only show one path to each dependency. * * @since 4.9 */ @Internal public boolean isShowSinglePathToDependency() { return showSinglePathToDependency; } /** * Tells if the report should only display a single path to each dependency, which * can be useful when the graph is large. This is false by default, meaning that for * each dependency, the report will display all paths leading to it. * * @since 4.9 */ @Option(option = "single-path", description = "Show at most one path to each dependency") public void setShowSinglePathToDependency(boolean showSinglePathToDependency) { this.showSinglePathToDependency = showSinglePathToDependency; } /** * Legacy option name for {@link #setShowSinglePathToDependency(boolean)}. This is not considered API, and should not be used. 
* * @since 7.5 * @deprecated should not be used, call {@link #setShowSinglePathToDependency(boolean)} instead */ @Deprecated @Incubating @Option(option = "singlepath", description = "Show at most one path to each dependency") public void setLegacyShowSinglePathToDependency(boolean showSinglePathToDependency) { this.showSinglePathToDependency = showSinglePathToDependency; } /** * Show all variants of each displayed dependency. * * @since 7.5 */ @Option(option = "all-variants", description = "Show all variants of each dependency") @Incubating @Internal public Property<Boolean> getShowingAllVariants() { return showingAllVariants; } @Inject protected StyledTextOutputFactory getTextOutputFactory() { throw new UnsupportedOperationException(); } @Inject protected VersionSelectorScheme getVersionSelectorScheme() { throw new UnsupportedOperationException(); } @Inject protected VersionComparator getVersionComparator() { throw new UnsupportedOperationException(); } @Inject protected VersionParser getVersionParser() { throw new UnsupportedOperationException(); } /** * An injected {@link ImmutableAttributesFactory}. 
*
* @since 4.9
*/
@Inject
protected ImmutableAttributesFactory getAttributesFactory() {
    // NOTE(review): like the other @Inject service getters in this task, this appears to be
    // implemented by Gradle's runtime class generation; the throwing body is a placeholder.
    throw new UnsupportedOperationException();
}

/**
 * Task entry point: validates the {@code --configuration}/{@code --dependency} inputs,
 * selects the matching dependencies from the resolution result and renders the
 * insight report to the console.
 */
@TaskAction
public void report() {
    final Configuration configuration = getConfiguration();
    assertValidTaskConfiguration(configuration);
    StyledTextOutput output = getTextOutputFactory().create(getClass());
    ResolutionErrorRenderer errorHandler = new ResolutionErrorRenderer(dependencySpec);
    Set<DependencyResult> selectedDependencies = selectDependencies(configuration, errorHandler);
    if (selectedDependencies.isEmpty()) {
        // Nothing matched the dependency spec: say so and bail out early.
        output.println("No dependencies matching given input were found in " + configuration);
        return;
    }
    // Resolution errors are printed first, then the inverted dependency graphs.
    errorHandler.renderErrors(output);
    renderSelectedDependencies(configuration, output, selectedDependencies);
    renderBuildScanHint(output);
}

// Converts the selected dependency results into renderable items and draws the
// inverted dependency graphs (leaf nodes are replaced by the configuration name).
private void renderSelectedDependencies(Configuration configuration, StyledTextOutput output, Set<DependencyResult> selectedDependencies) {
    GraphRenderer renderer = new GraphRenderer(output);
    DependencyInsightReporter reporter = new DependencyInsightReporter(getVersionSelectorScheme(), getVersionComparator(), getVersionParser());
    Collection<RenderableDependency> itemsToRender = reporter.convertToRenderableItems(selectedDependencies, isShowSinglePathToDependency());
    RootDependencyRenderer rootRenderer = new RootDependencyRenderer(this, configuration, getAttributesFactory());
    ReplaceProjectWithConfigurationNameRenderer dependenciesRenderer = new ReplaceProjectWithConfigurationNameRenderer(configuration);
    DependencyGraphsRenderer dependencyGraphRenderer = new DependencyGraphsRenderer(output, renderer, rootRenderer, dependenciesRenderer);
    dependencyGraphRenderer.setShowSinglePath(showSinglePathToDependency);
    dependencyGraphRenderer.render(itemsToRender);
    dependencyGraphRenderer.complete();
}

// Trailing hint pointing the user at the build-scan based dependency report.
private void renderBuildScanHint(StyledTextOutput output) {
    output.println();
    output.text("A web-based, searchable dependency report is available by adding the ");
    output.withStyle(UserInput).format("--%s", StartParameterBuildOptions.BuildScanOption.LONG_OPTION);
    output.println(" option.");
}

// Fails fast with a user-facing error when either required input is missing.
private void assertValidTaskConfiguration(@Nullable Configuration configuration) {
    if (configuration == null) {
        throw new InvalidUserDataException("Dependency insight report cannot be generated because the input configuration was not specified. "
            + "\nIt can be specified from the command line, e.g: '" + getPath() + " --configuration someConf --dependency someDep'");
    }
    if (dependencySpec == null) {
        throw new InvalidUserDataException("Dependency insight report cannot be generated because the dependency to show was not specified."
            + "\nIt can be specified from the command line, e.g: '" + getPath() + " --dependency someDep'");
    }
}

// Walks the whole resolution result and collects every dependency matching the spec,
// preserving discovery order. Resolution failures are routed to the error handler.
private Set<DependencyResult> selectDependencies(Configuration configuration, ResolutionErrorRenderer errorHandler) {
    ResolvableDependenciesInternal incoming = (ResolvableDependenciesInternal) configuration.getIncoming();
    ResolutionResult result = incoming.getResolutionResult(errorHandler);
    final Set<DependencyResult> selectedDependencies = new LinkedHashSet<>();
    result.allDependencies(dependencyResult -> {
        if (dependencySpec.isSatisfiedBy(dependencyResult)) {
            selectedDependencies.add(dependencyResult);
        }
    });
    return selectedDependencies;
}

// Classifies how a single provided attribute/value pair relates to the requested
// attributes: REQUESTED (exact or string-coerced match), DIFFERENT_VALUE (compatible
// via coercion through the schema matcher), INCOMPATIBLE (same name, no match), or
// NOT_REQUESTED (the consumer never asked for an attribute of this name).
@SuppressWarnings("unchecked")
private AttributeMatchDetails match(Attribute<?> actualAttribute, @Nullable Object actualValue, AttributeContainer requestedAttributes) {
    AttributesSchemaInternal schema = (AttributesSchemaInternal) getProject().getDependencies().getAttributesSchema();
    // As far as I could tell, the only schema ever mixed in using withProducer is PreferJavaRuntimeVariant
    // However, that only adds disambiguation rules, which don't apply here. So this should be sufficient:
    AttributeMatcher matcher = schema.matcher();
    for (Attribute<?> requested : requestedAttributes.keySet()) {
        Object requestedValue = requestedAttributes.getAttribute(requested);
        if (requested.getName().equals(actualAttribute.getName())) {
            // found an attribute with the same name, but they do not necessarily have the same type
            if (requested.equals(actualAttribute)) {
                if (Objects.equals(actualValue, requestedValue)) {
                    return new AttributeMatchDetails(MatchType.REQUESTED, requested, requestedValue);
                }
            } else {
                // maybe it matched through coercion
                Object actualString = actualValue != null ? actualValue.toString() : null;
                Object requestedString = requestedValue != null ? requestedValue.toString() : null;
                if (Objects.equals(actualString, requestedString)) {
                    return new AttributeMatchDetails(MatchType.REQUESTED, requested, requestedValue);
                }
            }
            // Coerce into the requested value, this is extremely hacky but it works
            if (requested.getType().isInstance(requestedValue) && actualValue instanceof String) {
                Object coerced = new CoercingStringValueSnapshot((String) actualValue, namedObjectInstantiator).coerce(requested.getType());
                if (coerced != null && matcher.isMatching((Attribute<Object>) requested, coerced, requestedValue)) {
                    return new AttributeMatchDetails(MatchType.DIFFERENT_VALUE, requested, requestedValue);
                }
            }
            return new AttributeMatchDetails(MatchType.INCOMPATIBLE, requested, requestedValue);
        }
    }
    return new AttributeMatchDetails(MatchType.NOT_REQUESTED, null, null);
}

/**
 * Renders the root node of each graph: name, resolution state, per-variant
 * attribute tables and any extra detail sections.
 */
private static final class RootDependencyRenderer implements NodeRenderer {
    private final DependencyInsightReportTask task;
    private final Configuration configuration;
    private final ImmutableAttributesFactory attributesFactory;

    public RootDependencyRenderer(DependencyInsightReportTask task, Configuration configuration, ImmutableAttributesFactory attributesFactory) {
        this.task = task;
        this.configuration = configuration;
        this.attributesFactory = attributesFactory;
    }

    @Override
    public void renderNode(StyledTextOutput out, RenderableDependency dependency, boolean alreadyRendered) {
        out.withStyle(Identifier).text(dependency.getName());
        if (StringUtils.isNotEmpty(dependency.getDescription())) {
            out.withStyle(Description).text(" (" + dependency.getDescription() + ")");
        }
        switch (dependency.getResolutionState()) {
            case FAILED:
                out.withStyle(Failure).text(" FAILED");
                break;
            case RESOLVED:
            case RESOLVED_CONSTRAINT:
                // Successful states add no marker.
                break;
            case UNRESOLVED:
                out.withStyle(Failure).text(" (n)");
                break;
        }
        printVariantDetails(out, dependency);
        printExtraDetails(out, dependency);
    }

    // Renders the optional "extra details" sections (reasons, custom messages) as an indented tree.
    private void printExtraDetails(StyledTextOutput out, RenderableDependency dependency) {
        List<Section> extraDetails = dependency.getExtraDetails();
        if (!extraDetails.isEmpty()) {
            printSections(out, extraDetails, 1);
        }
    }

    private void printSections(StyledTextOutput out, List<Section> extraDetails, int depth) {
        for (Section extraDetail : extraDetails) {
            printSection(out, extraDetail, depth);
            printSections(out, extraDetail.getChildren(), depth + 1);
        }
    }

    // One section line: 3 spaces per depth level, "- " bullet below the first level,
    // trailing ":" when the section has children; embedded newlines are re-indented.
    private void printSection(StyledTextOutput out, Section extraDetail, int depth) {
        out.println();
        String indent = StringUtils.leftPad("", 3 * depth) + (depth > 1 ? "- " : "");
        String appendix = extraDetail.getChildren().isEmpty() ? "" : ":";
        String description = StringUtils.trim(extraDetail.getDescription());
        String padding = "\n" + StringUtils.leftPad("", indent.length());
        description = description.replaceAll("(?m)(\r?\n)", padding);
        out.withStyle(Description).text(indent + description + appendix);
    }

    // Prints the selected variants, and — with --all-variants — the unselected ones
    // under separate headers.
    private void printVariantDetails(StyledTextOutput out, RenderableDependency dependency) {
        if (dependency.getResolvedVariants().isEmpty() && dependency.getAllVariants().isEmpty()) {
            return;
        }
        Set<String> selectedVariantNames = dependency.getResolvedVariants()
            .stream()
            .map(ResolvedVariantResult::getDisplayName)
            .collect(Collectors.toSet());
        if (task.getShowingAllVariants().get()) {
            out.style(Header);
            out.println();
            out.text("-------------------").println();
            out.text("Selected Variant(s)").println();
            out.text("-------------------");
            out.style(Normal);
        }
        for (ResolvedVariantResult variant : dependency.getResolvedVariants()) {
            printVariant(out, dependency, variant, true);
        }
        if (task.getShowingAllVariants().get()) {
            // NOTE(review): unlike the selected header there is no leading out.println() here;
            // printVariant() itself starts with println(), so spacing still works out.
            out.style(Header);
            out.text("---------------------").println();
            out.text("Unselected Variant(s)").println();
            out.text("---------------------");
            out.style(Normal);
            for (ResolvedVariantResult variant : dependency.getAllVariants()) {
                // Skip variants already printed in the selected section.
                if (selectedVariantNames.contains(variant.getDisplayName())) {
                    continue;
                }
                printVariant(out, dependency, variant, false);
            }
        }
    }

    // Prints one variant header plus (when any attributes exist) its attribute table.
    private void printVariant(
        StyledTextOutput out, RenderableDependency dependency, ResolvedVariantResult variant, boolean selected
    ) {
        AttributeContainer attributes = variant.getAttributes();
        AttributeContainer requested = getRequestedAttributes(configuration, dependency);
        AttributeBuckets buckets = bucketAttributes(attributes, requested);

        out.println().style(Normal).text("Variant ");

        // Style the name based on whether it is selected or not.
        if (selected) {
            out.style(Success);
        } else if (buckets.bothAttributes.values().stream().noneMatch(v -> v.matchType() == MatchType.INCOMPATIBLE)) {
            // Unselected but fully compatible: could have been chosen.
            out.style(AlternativeSuccess);
        } else {
            out.style(Failure);
        }
        out.text(variant.getDisplayName()).style(Normal).text(":").println();

        if (!attributes.isEmpty() || !requested.isEmpty()) {
            writeAttributeBlock(out, attributes, requested, buckets, selected);
        }
    }

    // Requested attributes = configuration attributes, merged with the dependency's own
    // attributes when it carries any.
    private AttributeContainer getRequestedAttributes(Configuration configuration, RenderableDependency dependency) {
        if (dependency instanceof HasAttributes) {
            AttributeContainer dependencyAttributes = ((HasAttributes) dependency).getAttributes();
            return concat(configuration.getAttributes(), dependencyAttributes);
        }
        return configuration.getAttributes();
    }

    private AttributeContainer concat(AttributeContainer configAttributes, AttributeContainer dependencyAttributes) {
        return attributesFactory.concat(
            ((AttributeContainerInternal) configAttributes).asImmutable(),
            ((AttributeContainerInternal) dependencyAttributes).asImmutable());
    }

    private void writeAttributeBlock(
        StyledTextOutput out, AttributeContainer attributes, AttributeContainer requested,
        AttributeBuckets buckets, boolean selected
    ) {
        new StyledTable.Renderer().render(
            createAttributeTable(attributes, requested, buckets, selected),
            out
        );
    }

    // Holder for the three attribute groups produced by bucketAttributes().
    private static final class AttributeBuckets {
        List<Attribute<?>> providedAttributes = new ArrayList<>();
        Map<Attribute<?>, AttributeMatchDetails> bothAttributes = new LinkedHashMap<>();
        List<Attribute<?>> requestedAttributes = new ArrayList<>();
    }

    // Builds the attribute table; a "Compatibility" column is only shown for unselected variants.
    private StyledTable createAttributeTable(
        AttributeContainer attributes, AttributeContainer requested, AttributeBuckets buckets, boolean selected
    ) {
        ImmutableList.Builder<String> header = ImmutableList.<String>builder()
            .add("Attribute Name", "Provided", "Requested");
        if (!selected) {
            header.add("Compatibility");
        }
        ImmutableList<StyledTable.Row> rows = buildRows(attributes, requested, buckets, selected);
        return new StyledTable(Strings.repeat(" ", 2), header.build(), rows);
    }

    // Row order: provided-only, then provided+requested, then requested-only.
    private ImmutableList<StyledTable.Row> buildRows(
        AttributeContainer attributes, AttributeContainer requested, AttributeBuckets buckets, boolean selected
    ) {
        ImmutableList.Builder<StyledTable.Row> rows = ImmutableList.builder();
        for (Attribute<?> attribute : buckets.providedAttributes) {
            rows.add(createProvidedRow(attributes, selected, attribute));
        }
        for (Map.Entry<Attribute<?>, AttributeMatchDetails> entry : buckets.bothAttributes.entrySet()) {
            rows.add(createMatchBasedRow(attributes, selected, entry));
        }
        for (Attribute<?> attribute : buckets.requestedAttributes) {
            rows.add(createRequestedRow(requested, selected, attribute));
        }
        return rows.build();
    }

    private AttributeBuckets bucketAttributes(AttributeContainer attributes, AttributeContainer requested) {
        // Bucket attributes into three groups:
        // 1. Attributes that are only in the variant
        // 2. Attributes that are both in the variant and requested by the configuration
        // 3. Attributes that are only in the requested configuration
        AttributeBuckets buckets = new AttributeBuckets();
        for (Attribute<?> attribute : attributes.keySet()) {
            AttributeMatchDetails details = task.match(attribute, attributes.getAttribute(attribute), requested);
            if (details.matchType() != MatchType.NOT_REQUESTED) {
                buckets.bothAttributes.put(attribute, details);
            } else {
                buckets.providedAttributes.add(attribute);
            }
        }
        for (Attribute<?> attribute : requested.keySet()) {
            // If it's not in the matches, it's only in the requested attributes
            if (buckets.bothAttributes.values().stream().map(AttributeMatchDetails::requested).noneMatch(Predicate.isEqual(attribute))) {
                buckets.requestedAttributes.add(attribute);
            }
        }
        return buckets;
    }

    // Provided-only attributes are informational; they never make a variant incompatible.
    private StyledTable.Row createProvidedRow(AttributeContainer attributes, boolean selected, Attribute<?> attribute) {
        Object providedValue = attributes.getAttribute(attribute);
        ImmutableList.Builder<String> text = ImmutableList.<String>builder()
            .add(
                attribute.getName(),
                providedValue == null ? "" : providedValue.toString(),
                ""
            );
        if (!selected) {
            text.add("Compatible");
        }
        return new StyledTable.Row(text.build(), Info);
    }

    // Row styling mirrors the match outcome: exact match green, coerced match alternative,
    // incompatible red.
    private StyledTable.Row createMatchBasedRow(AttributeContainer attributes, boolean selected, Map.Entry<Attribute<?>, AttributeMatchDetails> entry) {
        Object providedValue = attributes.getAttribute(entry.getKey());
        AttributeMatchDetails match = entry.getValue();
        ImmutableList.Builder<String> text = ImmutableList.<String>builder()
            .add(
                entry.getKey().getName(),
                providedValue == null ? "" : providedValue.toString(),
                String.valueOf(entry.getValue().requestedValue())
            );
        StyledTextOutput.Style style;
        switch (match.matchType()) {
            case REQUESTED:
                style = Success;
                break;
            case DIFFERENT_VALUE:
                style = AlternativeSuccess;
                break;
            case NOT_REQUESTED:
                style = Info;
                break;
            case INCOMPATIBLE:
                style = Failure;
                break;
            default:
                throw new IllegalStateException("Unknown match type: " + match.matchType());
        }
        if (!selected) {
            text.add(match.matchType() == MatchType.INCOMPATIBLE ? "Incompatible" : "Compatible");
        }
        return new StyledTable.Row(text.build(), style);
    }

    // Requested-only attributes (nothing provided) are informational as well.
    private StyledTable.Row createRequestedRow(AttributeContainer requested, boolean selected, Attribute<?> attribute) {
        Object requestedValue = requested.getAttribute(attribute);
        ImmutableList.Builder<String> text = ImmutableList.<String>builder()
            .add(
                attribute.getName(),
                "",
                String.valueOf(requestedValue)
            );
        if (!selected) {
            text.add("Compatible");
        }
        return new StyledTable.Row(text.build(), Info);
    }
}

/**
 * Renders non-root nodes; leaf nodes display the configuration name instead of the
 * project name, and already-rendered subtrees are marked with " (*)".
 */
private static class ReplaceProjectWithConfigurationNameRenderer implements NodeRenderer {
    private final Configuration configuration;

    public ReplaceProjectWithConfigurationNameRenderer(Configuration configuration) {
        this.configuration = configuration;
    }

    @Override
    public void renderNode(StyledTextOutput target, RenderableDependency node, boolean alreadyRendered) {
        boolean leaf = node.getChildren().isEmpty();
        target.text(leaf ? configuration.getName() : node.getName());
        if (node.getDescription() != null) {
            target.text(" ").withStyle(Description).text(node.getDescription());
        }
        if (alreadyRendered && !leaf) {
            target.withStyle(Info).text(" (*)");
        }
    }
}
}
subprojects/diagnostics/src/main/java/org/gradle/api/tasks/diagnostics/DependencyInsightReportTask.java
/* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.api.tasks.diagnostics; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import org.apache.commons.lang.StringUtils; import org.gradle.api.DefaultTask; import org.gradle.api.Incubating; import org.gradle.api.InvalidUserDataException; import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.result.DependencyResult; import org.gradle.api.artifacts.result.ResolutionResult; import org.gradle.api.artifacts.result.ResolvedVariantResult; import org.gradle.api.attributes.Attribute; import org.gradle.api.attributes.AttributeContainer; import org.gradle.api.attributes.HasAttributes; import org.gradle.api.internal.artifacts.configurations.ResolvableDependenciesInternal; import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.strategy.VersionComparator; import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.strategy.VersionParser; import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.strategy.VersionSelectorScheme; import org.gradle.api.internal.attributes.AttributeContainerInternal; import org.gradle.api.internal.attributes.AttributesSchemaInternal; import org.gradle.api.internal.attributes.ImmutableAttributesFactory; import org.gradle.api.internal.model.NamedObjectInstantiator; import org.gradle.api.provider.Property; import org.gradle.api.specs.Spec; import org.gradle.api.tasks.Internal; 
import org.gradle.api.tasks.TaskAction; import org.gradle.api.tasks.diagnostics.internal.ConfigurationFinder; import org.gradle.api.tasks.diagnostics.internal.dependencies.AttributeMatchDetails; import org.gradle.api.tasks.diagnostics.internal.dependencies.MatchType; import org.gradle.api.tasks.diagnostics.internal.dsl.DependencyResultSpecNotationConverter; import org.gradle.api.tasks.diagnostics.internal.graph.DependencyGraphsRenderer; import org.gradle.api.tasks.diagnostics.internal.graph.NodeRenderer; import org.gradle.api.tasks.diagnostics.internal.graph.nodes.RenderableDependency; import org.gradle.api.tasks.diagnostics.internal.graph.nodes.Section; import org.gradle.api.tasks.diagnostics.internal.insight.DependencyInsightReporter; import org.gradle.api.tasks.diagnostics.internal.text.StyledTable; import org.gradle.api.tasks.options.Option; import org.gradle.initialization.StartParameterBuildOptions; import org.gradle.internal.component.model.AttributeMatcher; import org.gradle.internal.graph.GraphRenderer; import org.gradle.internal.logging.text.StyledTextOutput; import org.gradle.internal.logging.text.StyledTextOutputFactory; import org.gradle.internal.snapshot.impl.CoercingStringValueSnapshot; import org.gradle.internal.typeconversion.NotationParser; import org.gradle.work.DisableCachingByDefault; import javax.annotation.Nullable; import javax.inject.Inject; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.function.Predicate; import java.util.stream.Collectors; import static org.gradle.internal.logging.text.StyledTextOutput.Style.AlternativeSuccess; import static org.gradle.internal.logging.text.StyledTextOutput.Style.Description; import static org.gradle.internal.logging.text.StyledTextOutput.Style.Failure; import static 
org.gradle.internal.logging.text.StyledTextOutput.Style.Header; import static org.gradle.internal.logging.text.StyledTextOutput.Style.Identifier; import static org.gradle.internal.logging.text.StyledTextOutput.Style.Info; import static org.gradle.internal.logging.text.StyledTextOutput.Style.Normal; import static org.gradle.internal.logging.text.StyledTextOutput.Style.Success; import static org.gradle.internal.logging.text.StyledTextOutput.Style.UserInput; /** * Generates a report that attempts to answer questions like: * <ul> * <li>Why is this dependency in the dependency graph?</li> * <li>Exactly which dependencies are pulling this dependency into the graph?</li> * <li>What is the actual version (i.e. *selected* version) of the dependency that will be used? Is it the same as what was *requested*?</li> * <li>Why is the *selected* version of a dependency different to the *requested*?</li> * </ul> * * Use this task to get insight into a particular dependency (or dependencies) * and find out what exactly happens during dependency resolution and conflict resolution. * If the dependency version was forced or selected by the conflict resolution * this information will be available in the report. * <p> * While the regular dependencies report ({@link DependencyReportTask}) shows the path from the top level dependencies down through the transitive dependencies, * the dependency insight report shows the path from a particular dependency to the dependencies that pulled it in. * That is, it is an inverted view of the regular dependencies report. * <p> * The task requires setting the dependency spec and the configuration. * For more information on how to configure those please refer to docs for * {@link DependencyInsightReportTask#setDependencySpec(Object)} and * {@link DependencyInsightReportTask#setConfiguration(String)}. * <p> * The task can also be configured from the command line. 
* For more information please refer to {@link DependencyInsightReportTask#setDependencySpec(Object)}
* and {@link DependencyInsightReportTask#setConfiguration(String)}
*/
@DisableCachingByDefault(because = "Produces only non-cacheable console output")
public class DependencyInsightReportTask extends DefaultTask {

    // Used by match() to coerce String attribute values into typed (Named) attribute values.
    private final NamedObjectInstantiator namedObjectInstantiator;
    // Task inputs, set via the DSL or the --configuration / --dependency /
    // --single-path / --all-variants command-line options.
    private Configuration configuration;
    private Spec<DependencyResult> dependencySpec;
    private boolean showSinglePathToDependency;
    private final Property<Boolean> showingAllVariants = getProject().getObjects().property(Boolean.class);

    @Inject
    public DependencyInsightReportTask(NamedObjectInstantiator namedObjectInstantiator) {
        this.namedObjectInstantiator = namedObjectInstantiator;
    }

    /**
     * Selects the dependency (or dependencies if multiple matches found) to show the report for.
     */
    @Internal
    public Spec<DependencyResult> getDependencySpec() {
        return dependencySpec;
    }

    /**
     * The dependency spec selects the dependency (or dependencies if multiple matches found) to show the report for.
     * The spec receives an instance of {@link DependencyResult} as parameter.
     */
    public void setDependencySpec(Spec<DependencyResult> dependencySpec) {
        this.dependencySpec = dependencySpec;
    }

    /**
     * Configures the dependency to show the report for.
     * Multiple notation formats are supported: Strings, instances of {@link Spec}
     * and groovy closures. Spec and closure receive {@link DependencyResult} as parameter.
     * Examples of String notation: 'org.slf4j:slf4j-api', 'slf4j-api', or simply: 'slf4j'.
     * The input may potentially match multiple dependencies.
     * See also {@link DependencyInsightReportTask#setDependencySpec(Spec)}
     * <p>
     * This method is exposed to the command line interface. Example usage:
     * <pre>gradle dependencyInsight --dependency slf4j</pre>
     */
    @Option(option = "dependency", description = "Shows the details of given dependency.")
    public void setDependencySpec(Object dependencyInsightNotation) {
        NotationParser<Object, Spec<DependencyResult>> parser = DependencyResultSpecNotationConverter.parser();
        this.dependencySpec = parser.parseNotation(dependencyInsightNotation);
    }

    /**
     * Configuration to look the dependency in
     */
    @Internal
    public Configuration getConfiguration() {
        return configuration;
    }

    /**
     * Sets the configuration to look the dependency in.
     */
    public void setConfiguration(Configuration configuration) {
        this.configuration = configuration;
    }

    /**
     * Sets the configuration (via name) to look the dependency in.
     * <p>
     * This method is exposed to the command line interface. Example usage:
     * <pre>gradle dependencyInsight --configuration runtime --dependency slf4j</pre>
     */
    @Option(option = "configuration", description = "Looks for the dependency in given configuration.")
    public void setConfiguration(String configurationName) {
        this.configuration = ConfigurationFinder.find(getProject().getConfigurations(), configurationName);
    }

    /**
     * Tells if the report should only show one path to each dependency.
     *
     * @since 4.9
     */
    @Internal
    public boolean isShowSinglePathToDependency() {
        return showSinglePathToDependency;
    }

    /**
     * Tells if the report should only display a single path to each dependency, which
     * can be useful when the graph is large. This is false by default, meaning that for
     * each dependency, the report will display all paths leading to it.
     *
     * @since 4.9
     */
    @Option(option = "single-path", description = "Show at most one path to each dependency")
    public void setShowSinglePathToDependency(boolean showSinglePathToDependency) {
        this.showSinglePathToDependency = showSinglePathToDependency;
    }

    /**
     * Legacy option name for {@link #setShowSinglePathToDependency(boolean)}.
     * This is not considered API, and should not be used.
     *
     * @since 7.5
     * @deprecated should not be used, call {@link #setShowSinglePathToDependency(boolean)} instead
     */
    @Deprecated
    @Incubating
    @Option(option = "singlepath", description = "Show at most one path to each dependency")
    public void setLegacyShowSinglePathToDependency(boolean showSinglePathToDependency) {
        this.showSinglePathToDependency = showSinglePathToDependency;
    }

    /**
     * Show all variants of each displayed dependency.
     *
     * @since 7.5
     */
    @Option(option = "all-variants", description = "Show all variants of each dependency")
    @Incubating
    @Internal
    public Property<Boolean> getShowingAllVariants() {
        return showingAllVariants;
    }

    // The following @Inject getters follow Gradle's service-injection idiom: the throwing
    // bodies are placeholders, presumably replaced by runtime class generation.
    @Inject
    protected StyledTextOutputFactory getTextOutputFactory() {
        throw new UnsupportedOperationException();
    }

    @Inject
    protected VersionSelectorScheme getVersionSelectorScheme() {
        throw new UnsupportedOperationException();
    }

    @Inject
    protected VersionComparator getVersionComparator() {
        throw new UnsupportedOperationException();
    }

    @Inject
    protected VersionParser getVersionParser() {
        throw new UnsupportedOperationException();
    }

    /**
     * An injected {@link ImmutableAttributesFactory}.
*
* @since 4.9
*/
@Inject
protected ImmutableAttributesFactory getAttributesFactory() {
    // NOTE(review): like the other @Inject service getters, presumably implemented by
    // Gradle's runtime class generation; the throwing body is a placeholder.
    throw new UnsupportedOperationException();
}

/**
 * Task entry point: validates the inputs, selects the matching dependencies and
 * renders the insight report to the console.
 */
@TaskAction
public void report() {
    final Configuration configuration = getConfiguration();
    assertValidTaskConfiguration(configuration);
    StyledTextOutput output = getTextOutputFactory().create(getClass());
    ResolutionErrorRenderer errorHandler = new ResolutionErrorRenderer(dependencySpec);
    Set<DependencyResult> selectedDependencies = selectDependencies(configuration, errorHandler);
    if (selectedDependencies.isEmpty()) {
        // Nothing matched the dependency spec: say so and bail out early.
        output.println("No dependencies matching given input were found in " + configuration);
        return;
    }
    // Resolution errors first, then the inverted dependency graphs.
    errorHandler.renderErrors(output);
    renderSelectedDependencies(configuration, output, selectedDependencies);
    renderBuildScanHint(output);
}

// Converts the selected results into renderable items and draws the graphs.
private void renderSelectedDependencies(Configuration configuration, StyledTextOutput output, Set<DependencyResult> selectedDependencies) {
    GraphRenderer renderer = new GraphRenderer(output);
    DependencyInsightReporter reporter = new DependencyInsightReporter(getVersionSelectorScheme(), getVersionComparator(), getVersionParser());
    Collection<RenderableDependency> itemsToRender = reporter.convertToRenderableItems(selectedDependencies, isShowSinglePathToDependency());
    RootDependencyRenderer rootRenderer = new RootDependencyRenderer(this, configuration, getAttributesFactory());
    ReplaceProjectWithConfigurationNameRenderer dependenciesRenderer = new ReplaceProjectWithConfigurationNameRenderer(configuration);
    DependencyGraphsRenderer dependencyGraphRenderer = new DependencyGraphsRenderer(output, renderer, rootRenderer, dependenciesRenderer);
    dependencyGraphRenderer.setShowSinglePath(showSinglePathToDependency);
    dependencyGraphRenderer.render(itemsToRender);
    dependencyGraphRenderer.complete();
}

// Trailing hint pointing the user at the build-scan based dependency report.
private void renderBuildScanHint(StyledTextOutput output) {
    output.println();
    output.text("A web-based, searchable dependency report is available by adding the ");
    output.withStyle(UserInput).format("--%s", StartParameterBuildOptions.BuildScanOption.LONG_OPTION);
    output.println(" option.");
}

// Fails fast with a user-facing error when either required input is missing.
private void assertValidTaskConfiguration(@Nullable Configuration configuration) {
    if (configuration == null) {
        throw new InvalidUserDataException("Dependency insight report cannot be generated because the input configuration was not specified. "
            + "\nIt can be specified from the command line, e.g: '" + getPath() + " --configuration someConf --dependency someDep'");
    }
    if (dependencySpec == null) {
        throw new InvalidUserDataException("Dependency insight report cannot be generated because the dependency to show was not specified."
            + "\nIt can be specified from the command line, e.g: '" + getPath() + " --dependency someDep'");
    }
}

// Walks the resolution result collecting every dependency matching the spec, in discovery order.
private Set<DependencyResult> selectDependencies(Configuration configuration, ResolutionErrorRenderer errorHandler) {
    ResolvableDependenciesInternal incoming = (ResolvableDependenciesInternal) configuration.getIncoming();
    ResolutionResult result = incoming.getResolutionResult(errorHandler);
    final Set<DependencyResult> selectedDependencies = new LinkedHashSet<>();
    result.allDependencies(dependencyResult -> {
        if (dependencySpec.isSatisfiedBy(dependencyResult)) {
            selectedDependencies.add(dependencyResult);
        }
    });
    return selectedDependencies;
}

// Classifies how a provided attribute/value relates to the requested attributes:
// REQUESTED, DIFFERENT_VALUE (compatible via coercion), INCOMPATIBLE, or NOT_REQUESTED.
@SuppressWarnings("unchecked")
private AttributeMatchDetails match(Attribute<?> actualAttribute, @Nullable Object actualValue, AttributeContainer requestedAttributes) {
    AttributesSchemaInternal schema = (AttributesSchemaInternal) getProject().getDependencies().getAttributesSchema();
    // As far as I could tell, the only schema ever mixed in using withProducer is PreferJavaRuntimeVariant
    // However, that only adds disambiguation rules, which don't apply here. So this should be sufficient:
    AttributeMatcher matcher = schema.matcher();
    for (Attribute<?> requested : requestedAttributes.keySet()) {
        Object requestedValue = requestedAttributes.getAttribute(requested);
        if (requested.getName().equals(actualAttribute.getName())) {
            // found an attribute with the same name, but they do not necessarily have the same type
            if (requested.equals(actualAttribute)) {
                if (Objects.equals(actualValue, requestedValue)) {
                    return new AttributeMatchDetails(MatchType.REQUESTED, requested, requestedValue);
                }
            } else {
                // maybe it matched through coercion
                Object actualString = actualValue != null ? actualValue.toString() : null;
                Object requestedString = requestedValue != null ? requestedValue.toString() : null;
                if (Objects.equals(actualString, requestedString)) {
                    return new AttributeMatchDetails(MatchType.REQUESTED, requested, requestedValue);
                }
            }
            // Coerce into the requested value, this is extremely hacky but it works
            if (requested.getType().isInstance(requestedValue) && actualValue instanceof String) {
                Object coerced = new CoercingStringValueSnapshot((String) actualValue, namedObjectInstantiator).coerce(requested.getType());
                if (coerced != null && matcher.isMatching((Attribute<Object>) requested, coerced, requestedValue)) {
                    return new AttributeMatchDetails(MatchType.DIFFERENT_VALUE, requested, requestedValue);
                }
            }
            return new AttributeMatchDetails(MatchType.INCOMPATIBLE, requested, requestedValue);
        }
    }
    return new AttributeMatchDetails(MatchType.NOT_REQUESTED, null, null);
}

/**
 * Renders the root node of each graph: name, resolution state, per-variant details
 * and any extra detail sections.
 */
private static final class RootDependencyRenderer implements NodeRenderer {
    private final DependencyInsightReportTask task;
    private final Configuration configuration;
    private final ImmutableAttributesFactory attributesFactory;

    public RootDependencyRenderer(DependencyInsightReportTask task, Configuration configuration, ImmutableAttributesFactory attributesFactory) {
        this.task = task;
        this.configuration = configuration;
        this.attributesFactory = attributesFactory;
    }

    @Override
    public void renderNode(StyledTextOutput out, RenderableDependency dependency, boolean alreadyRendered) {
        out.withStyle(Identifier).text(dependency.getName());
        if (StringUtils.isNotEmpty(dependency.getDescription())) {
            out.withStyle(Description).text(" (" + dependency.getDescription() + ")");
        }
        switch (dependency.getResolutionState()) {
            case FAILED:
                out.withStyle(Failure).text(" FAILED");
                break;
            case RESOLVED:
            case RESOLVED_CONSTRAINT:
                // Successful states add no marker.
                break;
            case UNRESOLVED:
                out.withStyle(Failure).text(" (n)");
                break;
        }
        printVariantDetails(out, dependency);
        printExtraDetails(out, dependency);
    }

    // Renders the optional "extra details" sections as an indented tree.
    private void printExtraDetails(StyledTextOutput out, RenderableDependency dependency) {
        List<Section> extraDetails = dependency.getExtraDetails();
        if (!extraDetails.isEmpty()) {
            printSections(out, extraDetails, 1);
        }
    }

    private void printSections(StyledTextOutput out, List<Section> extraDetails, int depth) {
        for (Section extraDetail : extraDetails) {
            printSection(out, extraDetail, depth);
            printSections(out, extraDetail.getChildren(), depth + 1);
        }
    }

    // One section line: 3 spaces per depth level, "- " bullet below the first level,
    // trailing ":" when the section has children; embedded newlines are re-indented.
    private void printSection(StyledTextOutput out, Section extraDetail, int depth) {
        out.println();
        String indent = StringUtils.leftPad("", 3 * depth) + (depth > 1 ? "- " : "");
        String appendix = extraDetail.getChildren().isEmpty() ? "" : ":";
        String description = StringUtils.trim(extraDetail.getDescription());
        String padding = "\n" + StringUtils.leftPad("", indent.length());
        description = description.replaceAll("(?m)(\r?\n)", padding);
        out.withStyle(Description).text(indent + description + appendix);
    }

    // Prints the selected variants, and — with --all-variants — the unselected ones
    // under separate headers.
    private void printVariantDetails(StyledTextOutput out, RenderableDependency dependency) {
        if (dependency.getResolvedVariants().isEmpty() && dependency.getAllVariants().isEmpty()) {
            return;
        }
        Set<String> selectedVariantNames = dependency.getResolvedVariants()
            .stream()
            .map(ResolvedVariantResult::getDisplayName)
            .collect(Collectors.toSet());
        if (task.getShowingAllVariants().get()) {
            out.style(Header);
            out.println();
            out.text("-------------------").println();
            out.text("Selected Variant(s)").println();
            out.text("-------------------");
            out.style(Normal);
        }
        for (ResolvedVariantResult variant : dependency.getResolvedVariants()) {
            printVariant(out, dependency, variant, true);
        }
        if (task.getShowingAllVariants().get()) {
            out.style(Header);
            out.text("---------------------").println();
            out.text("Unselected Variant(s)").println();
            out.text("---------------------");
            out.style(Normal);
            for (ResolvedVariantResult variant : dependency.getAllVariants()) {
                // Skip variants already printed in the selected section.
                if (selectedVariantNames.contains(variant.getDisplayName())) {
                    continue;
                }
                printVariant(out, dependency, variant, false);
            }
        }
    }

    // Prints one variant header plus (when any attributes exist) its attribute block.
    private void printVariant(StyledTextOutput out, RenderableDependency dependency, ResolvedVariantResult variant, boolean selected) {
        out.println();
        out.withStyle(Description).text("Variant \"" + variant.getDisplayName() + "\":");
        out.println();
        AttributeContainer attributes = variant.getAttributes();
        AttributeContainer requested = getRequestedAttributes(configuration, dependency);
        if (!attributes.isEmpty() || !requested.isEmpty()) {
            writeAttributeBlock(out, attributes, requested, selected);
        }
    }

    // Requested attributes = configuration attributes, merged with the dependency's own
    // attributes when it carries any.
    private AttributeContainer getRequestedAttributes(Configuration configuration, RenderableDependency dependency) {
        if (dependency instanceof
HasAttributes) { AttributeContainer dependencyAttributes = ((HasAttributes) dependency).getAttributes(); return concat(configuration.getAttributes(), dependencyAttributes); } return configuration.getAttributes(); } private AttributeContainer concat(AttributeContainer configAttributes, AttributeContainer dependencyAttributes) { return attributesFactory.concat( ((AttributeContainerInternal) configAttributes).asImmutable(), ((AttributeContainerInternal) dependencyAttributes).asImmutable()); } private void writeAttributeBlock(StyledTextOutput out, AttributeContainer attributes, AttributeContainer requested, boolean selected) { out.withStyle(Description).text(" Attributes:"); out.println(); new StyledTable.Renderer().render( createAttributeTable(attributes, requested, selected), out ); } private static final class AttributeBuckets { List<Attribute<?>> providedAttributes = new ArrayList<>(); Map<Attribute<?>, AttributeMatchDetails> bothAttributes = new LinkedHashMap<>(); List<Attribute<?>> requestedAttributes = new ArrayList<>(); } private StyledTable createAttributeTable(AttributeContainer attributes, AttributeContainer requested, boolean selected) { ImmutableList.Builder<String> header = ImmutableList.<String>builder() .add("Name", "Provided", "Requested"); if (!selected) { header.add("Compatibility"); } ImmutableList<StyledTable.Row> rows = buildRows(attributes, requested, selected); return new StyledTable(Strings.repeat(" ", 4), header.build(), rows); } private ImmutableList<StyledTable.Row> buildRows(AttributeContainer attributes, AttributeContainer requested, boolean selected) { AttributeBuckets buckets = bucketAttributes(attributes, requested); ImmutableList.Builder<StyledTable.Row> rows = ImmutableList.builder(); for (Attribute<?> attribute : buckets.providedAttributes) { rows.add(createProvidedRow(attributes, selected, attribute)); } for (Map.Entry<Attribute<?>, AttributeMatchDetails> entry : buckets.bothAttributes.entrySet()) { 
rows.add(createMatchBasedRow(attributes, selected, entry)); } for (Attribute<?> attribute : buckets.requestedAttributes) { rows.add(createRequestedRow(requested, selected, attribute)); } return rows.build(); } private AttributeBuckets bucketAttributes(AttributeContainer attributes, AttributeContainer requested) { // Bucket attributes into three groups: // 1. Attributes that are only in the variant // 2. Attributes that are both in the variant and requested by the configuration // 3. Attributes that are only in the requested configuration AttributeBuckets buckets = new AttributeBuckets(); for (Attribute<?> attribute : attributes.keySet()) { AttributeMatchDetails details = task.match(attribute, attributes.getAttribute(attribute), requested); if (details.matchType() != MatchType.NOT_REQUESTED) { buckets.bothAttributes.put(attribute, details); } else { buckets.providedAttributes.add(attribute); } } for (Attribute<?> attribute : requested.keySet()) { // If it's not in the matches, it's only in the requested attributes if (buckets.bothAttributes.values().stream().map(AttributeMatchDetails::requested).noneMatch(Predicate.isEqual(attribute))) { buckets.requestedAttributes.add(attribute); } } return buckets; } private StyledTable.Row createProvidedRow(AttributeContainer attributes, boolean selected, Attribute<?> attribute) { Object providedValue = attributes.getAttribute(attribute); ImmutableList.Builder<String> text = ImmutableList.<String>builder() .add( attribute.getName(), providedValue == null ? 
"" : providedValue.toString(), "" ); if (!selected) { text.add("Compatible"); } return new StyledTable.Row(text.build(), Info); } private StyledTable.Row createMatchBasedRow(AttributeContainer attributes, boolean selected, Map.Entry<Attribute<?>, AttributeMatchDetails> entry) { Object providedValue = attributes.getAttribute(entry.getKey()); AttributeMatchDetails match = entry.getValue(); ImmutableList.Builder<String> text = ImmutableList.<String>builder() .add( entry.getKey().getName(), providedValue == null ? "" : providedValue.toString(), String.valueOf(entry.getValue().requestedValue()) ); StyledTextOutput.Style style; switch (match.matchType()) { case REQUESTED: style = Success; break; case DIFFERENT_VALUE: style = AlternativeSuccess; break; case NOT_REQUESTED: style = Info; break; case INCOMPATIBLE: style = Failure; break; default: throw new IllegalStateException("Unknown match type: " + match.matchType()); } if (!selected) { text.add(match.matchType() == MatchType.INCOMPATIBLE ? "Incompatible" : "Compatible"); } return new StyledTable.Row(text.build(), style); } private StyledTable.Row createRequestedRow(AttributeContainer requested, boolean selected, Attribute<?> attribute) { Object requestedValue = requested.getAttribute(attribute); ImmutableList.Builder<String> text = ImmutableList.<String>builder() .add( attribute.getName(), "", String.valueOf(requestedValue) ); if (!selected) { text.add("Compatible"); } return new StyledTable.Row(text.build(), Info); } } private static class ReplaceProjectWithConfigurationNameRenderer implements NodeRenderer { private final Configuration configuration; public ReplaceProjectWithConfigurationNameRenderer(Configuration configuration) { this.configuration = configuration; } @Override public void renderNode(StyledTextOutput target, RenderableDependency node, boolean alreadyRendered) { boolean leaf = node.getChildren().isEmpty(); target.text(leaf ? 
configuration.getName() : node.getName()); if (node.getDescription() != null) { target.text(" ").withStyle(Description).text(node.getDescription()); } if (alreadyRendered && !leaf) { target.withStyle(Info).text(" (*)"); } } } }
Apply some output changes
subprojects/diagnostics/src/main/java/org/gradle/api/tasks/diagnostics/DependencyInsightReportTask.java
Apply some output changes
<ide><path>ubprojects/diagnostics/src/main/java/org/gradle/api/tasks/diagnostics/DependencyInsightReportTask.java <ide> } <ide> } <ide> <del> private void printVariant(StyledTextOutput out, RenderableDependency dependency, ResolvedVariantResult variant, boolean selected) { <del> out.println(); <del> out.withStyle(Description).text("Variant \"" + variant.getDisplayName() + "\":"); <del> out.println(); <add> private void printVariant( <add> StyledTextOutput out, RenderableDependency dependency, ResolvedVariantResult variant, boolean selected <add> ) { <ide> AttributeContainer attributes = variant.getAttributes(); <ide> AttributeContainer requested = getRequestedAttributes(configuration, dependency); <add> AttributeBuckets buckets = bucketAttributes(attributes, requested); <add> <add> out.println().style(Normal).text("Variant "); <add> <add> // Style the name based on whether it is selected or not. <add> if (selected) { <add> out.style(Success); <add> } else if (buckets.bothAttributes.values().stream().noneMatch(v -> v.matchType() == MatchType.INCOMPATIBLE)) { <add> out.style(AlternativeSuccess); <add> } else { <add> out.style(Failure); <add> } <add> out.text(variant.getDisplayName()).style(Normal).text(":").println(); <ide> if (!attributes.isEmpty() || !requested.isEmpty()) { <del> writeAttributeBlock(out, attributes, requested, selected); <add> writeAttributeBlock(out, attributes, requested, buckets, selected); <ide> } <ide> } <ide> <ide> ((AttributeContainerInternal) dependencyAttributes).asImmutable()); <ide> } <ide> <del> private void writeAttributeBlock(StyledTextOutput out, AttributeContainer attributes, AttributeContainer requested, boolean selected) { <del> out.withStyle(Description).text(" Attributes:"); <del> out.println(); <add> private void writeAttributeBlock( <add> StyledTextOutput out, AttributeContainer attributes, AttributeContainer requested, <add> AttributeBuckets buckets, boolean selected <add> ) { <ide> new StyledTable.Renderer().render( <del> 
createAttributeTable(attributes, requested, selected), <add> createAttributeTable(attributes, requested, buckets, selected), <ide> out <ide> ); <ide> } <ide> List<Attribute<?>> requestedAttributes = new ArrayList<>(); <ide> } <ide> <del> private StyledTable createAttributeTable(AttributeContainer attributes, AttributeContainer requested, boolean selected) { <add> private StyledTable createAttributeTable( <add> AttributeContainer attributes, AttributeContainer requested, AttributeBuckets buckets, boolean selected <add> ) { <ide> ImmutableList.Builder<String> header = ImmutableList.<String>builder() <del> .add("Name", "Provided", "Requested"); <add> .add("Attribute Name", "Provided", "Requested"); <ide> if (!selected) { <ide> header.add("Compatibility"); <ide> } <ide> <del> ImmutableList<StyledTable.Row> rows = buildRows(attributes, requested, selected); <del> <del> return new StyledTable(Strings.repeat(" ", 4), header.build(), rows); <del> } <del> <del> private ImmutableList<StyledTable.Row> buildRows(AttributeContainer attributes, AttributeContainer requested, boolean selected) { <del> AttributeBuckets buckets = bucketAttributes(attributes, requested); <del> <add> ImmutableList<StyledTable.Row> rows = buildRows(attributes, requested, buckets, selected); <add> <add> return new StyledTable(Strings.repeat(" ", 2), header.build(), rows); <add> } <add> <add> private ImmutableList<StyledTable.Row> buildRows( <add> AttributeContainer attributes, AttributeContainer requested, AttributeBuckets buckets, boolean selected <add> ) { <ide> ImmutableList.Builder<StyledTable.Row> rows = ImmutableList.builder(); <ide> for (Attribute<?> attribute : buckets.providedAttributes) { <ide> rows.add(createProvidedRow(attributes, selected, attribute));
Java
apache-2.0
35c4a2798155ccd3060cbd3a57b3d2a900e54122
0
diegoRodriguezAguila/Cobranza.Elfec.Mobile
package com.elfec.cobranza.business_logic.printer; import java.util.List; import java.util.Locale; import org.apache.commons.lang.WordUtils; import org.joda.time.DateTime; import org.joda.time.Days; import com.elfec.cobranza.business_logic.ConceptManager; import com.elfec.cobranza.business_logic.PrinterImagesManager; import com.elfec.cobranza.business_logic.SessionManager; import com.elfec.cobranza.helpers.text_format.AccountFormatter; import com.elfec.cobranza.helpers.utils.AmountsCounter; import com.elfec.cobranza.model.Category; import com.elfec.cobranza.model.CollectionPayment; import com.elfec.cobranza.model.Concept; import com.elfec.cobranza.model.CoopReceipt; import com.elfec.cobranza.model.Supply; import com.elfec.cobranza.model.SupplyStatus; import com.elfec.cobranza.model.printer.CPCLCommand; import com.elfec.cobranza.model.printer.CPCLCommand.Justify; import com.elfec.cobranza.model.printer.CPCLCommand.QRQuality; import com.elfec.cobranza.model.printer.CPCLCommand.Unit; import com.elfec.cobranza.model.printer.PrintConcept; import com.elfec.cobranza.settings.ParameterSettingsManager; import com.elfec.cobranza.settings.ParameterSettingsManager.ParamKey; /** * Clase que se encarga de generar el comando de impresin de una factura * @author drodriguez * */ public class ReceiptGenerator { /** * Define el espacio entre lineas de textos multilinea, en CM */ private static final double SP_FACTOR = 0.37; /** * Define el tamao mximo de caracteres que puede ocupar una lnea de texto */ private static final int WRAP_LIMIT = 30; /** * El limite de tamao de un concepto */ private static final int CONCEPT_WRAP_LIMIT = 34; /** * Define el tamao mximo de caracteres que puede ocupar una lnea de texto del literal de la factura */ private static final int LITERAL_WRAP_LIMIT = 66; /** * Define el tamao mximo de caracteres que puede ocupar una lnea de texto del footer */ private static final int FOOTER_WRAP_LIMIT = 42; /** * El NIT de Elfec */ private static final String 
ELFEC_NIT = "1023213028"; /** * El espacio extra en la informacin de la factura */ private static double rcptDataExtraSpacing; /** * El tamao de la factura en cm */ private static double receiptHeight; /** * Booleano que indica si es que se debe usar el nuevo formato */ private static boolean isNewFormat; /** * Genera el comando cpcl de impresin del recibo * @param receipt * @return */ public static CPCLCommand generateCommand(CoopReceipt receipt, long internalControlCode) { rcptDataExtraSpacing = 0; receiptHeight = 0; isNewFormat = (Days.daysBetween( ParameterSettingsManager.getParameter(ParamKey.SFV_DATE).getDateTimeValue(), DateTime.now()).getDays()>=0); CPCLCommand command = new CPCLCommand(200, 400, 11.5).inUnit(Unit.IN_CENTIMETERS ); assignHeaderData(command, receipt); assignReceiptData(command, receipt); assignReceiptDetails(command, receipt); assignFooterData(command, receipt, internalControlCode); command.setLabelHeight(receiptHeight+0.8); command.print(); return command; } /** * Asigna la informacin de la factura a la cabecera en el comando de la impresora * @param command */ private static void assignHeaderData(CPCLCommand command, CoopReceipt receipt) { command.justify(Justify.CENTER) .image(0, receiptHeight, PrinterImagesManager.HEADER_IMAGE_IN_PRINTER_NAME) .text("TAHOMA15.CPF", 0, 0, receiptHeight+=4.1, 0.049, 0.076, "FACTURA ORIGINAL"); if(!isNewFormat) command.text("TAHOMA8P.CPF", 0, 0, receiptHeight+=0.75, receipt.getAuthorizationDescription()); double boxStartY = receiptHeight += (isNewFormat?0.75:SP_FACTOR); command.setFont("TAHOMA11.CPF") .multilineText(0.44, 0, 0, receiptHeight+=0.15, "NIT: "+ELFEC_NIT, "FACTURA No.: "+receipt.getReceiptNumber(), "AUTORIZACIN: "+receipt.getAuthorizationNumber()) .setFont("TAHOMA8P.CPF") .text( 0, 0, receiptHeight+=1.5, 0.025, 0.025, "Actividad Econmica:") .text( 0, 0, receiptHeight+=0.35, "Venta de Energa Elctrica") .justify(Justify.LEFT) .box(0.4, boxStartY, 10.05, receiptHeight+=0.55, 0.02); } /** * Asigna 
la informacin de la factura al segundo sector de la factura en el comando de la impresora * @param command */ private static void assignReceiptData(CPCLCommand command, CoopReceipt receipt) { double boxStartY = receiptHeight-0.02; double startY = receiptHeight+=0.15; double extraSP = assignReceiptRightData(command, receipt, startY); assignReceiptLeftData(command, receipt); receiptHeight = Math.max((startY+extraSP), (receiptHeight+0.15))+rcptDataExtraSpacing; command.justify(Justify.LEFT) .box(0.4, boxStartY, 10.05, receiptHeight, 0.02); } /** * Asigna la informacin de la columna izquierda de la factura al segundo sector de la factura en el comando de la impresora * @param command */ private static void assignReceiptLeftData(CPCLCommand command, CoopReceipt receipt) { String clientName = wrapName(Supply.findSupplyByNUSOrAccount(receipt.getSupplyId(), receipt.getSupplyNumber()).getClientName()); String clientAddress = wrapAddress("DIRECCIN: "+receipt.getClientAddress()); double extraSpacing = ((clientName.split("\r\n").length-1)*SP_FACTOR); command.justify(Justify.LEFT, 3) .setFont("TAHOMA8P.CPF") .text(0, 0.6, receiptHeight, 0.03, 0.03, "FECHA EMISIN:") .text(0, 3.3, receiptHeight, receipt.getIssueDate().toString("dd/MM/yyyy")) .text("TAHOMA11.CPF", 0, 0.6, receiptHeight+=0.35, 0.035, 0.035, "NUS:") .text("TAHOMA11.CPF", 0, 1.6, receiptHeight, 0.035, 0.035, ""+receipt.getSupplyId()) .text(0, 0.6, receiptHeight+=0.5, 0.03, 0.03, "CUENTA:") .text(0, 2, receiptHeight, AccountFormatter.formatAccountNumber(receipt.getSupplyNumber())) .text(0, 0.6, receiptHeight+=0.35, 0.03, 0.03, "NOMBRE:") .multilineText(SP_FACTOR, 0, 2.05, receiptHeight, clientName) .multilineText(SP_FACTOR, 0, 0.6, receiptHeight+=(0.35+extraSpacing), "NIT/CI: "+receipt.getNIT(), clientAddress, "CATEGORA: "+Category.getFullCategoryDesc(receipt.getCategoryId()), "MEDIDOR: "+receipt.getMeterNumber()); extraSpacing = (extraSpacing + ((clientAddress.split("\r\n").length-1)*SP_FACTOR))-(2*SP_FACTOR); 
rcptDataExtraSpacing = extraSpacing>0?extraSpacing:0; } /** * Asigna la informacin de la columna derecha de la factura al * segundo sector de la factura en el comando de la impresora * @param command * @return extraSpacing */ private static double assignReceiptRightData(CPCLCommand command, CoopReceipt receipt, double startY) { SupplyStatus powerSupplyStatus = receipt.getPowerSupplyStatus(); DateTime period = new DateTime(receipt.getYear(), receipt.getPeriodNumber(),1,0,0); int daysPastDue = Days.daysBetween(receipt.getExpirationDate(), DateTime.now()).getDays(); String readings = getReadings(receipt.getSupplyStatusSet().getSupplyStatusList()); double extraSpacing = (readings.split("\r\n").length+8)*SP_FACTOR; CollectionPayment payment = receipt.getActiveCollectionPayment(); command.justify(Justify.LEFT, 4.6) .setFont("TAHOMA8P.CPF") .multilineText(SP_FACTOR, 0, 5.6, startY, "CONSUMO (kWh): "+receipt.getSupplyStatusSet().getBilledConsume(), "POTENCIA: "+(powerSupplyStatus==null?0:powerSupplyStatus.getBilledConsume()), "PERIODO: "+period.toString("MMM/yyyy").toUpperCase(Locale.getDefault()), "DE: "+receipt.getSupplyStatusSet().getLastReadingDate().toString("dd/MM/yyyy") +" A: "+receipt.getSupplyStatusSet().getDate().toString("dd/MM/yyyy"), readings, "FECHA PAGO: "+((payment==null?DateTime.now(): payment.getPaymentDate()).toString("dd/MM/yyyy HH:mm")), "VENCIMIENTO: "+receipt.getExpirationDate().toString("dd/MM/yyyy"), "DIAS MOROSIDAD: "+(daysPastDue<0?0:daysPastDue), "PRXIMA EMISIN: "+receipt.getIssueDate().plusDays(33).toString("dd/MM/yyyy")); return extraSpacing; } /** * Obtiene las lecturas anteriores y actuales de los medidores del suministro de la factura * @param supplyStatuses * @return lecturas anteriores y actuales */ private static String getReadings(List<SupplyStatus> supplyStatuses) { StringBuilder str = new StringBuilder(); int count = 0; int size = supplyStatuses.size(); for (SupplyStatus supplyStatus : supplyStatuses) { count++; str.append("LECTURA 
ANTERIOR: ").append(supplyStatus.getLastReading()).append("\r\n") .append("LECTURA ACTUAL: ").append(supplyStatus.getReading()); if(count<size) str.append("\r\n"); } return str.toString(); } /** * Asigna los conceptos y sus importes de la factura * @param command * @param receipt */ private static void assignReceiptDetails(CPCLCommand command, CoopReceipt receipt) { double boxStartY = receiptHeight-0.02; receiptHeight+=0.15; command.justify(Justify.LEFT, 6.7) .setFont("TAHOMA8P.CPF") .text(0, 1, receiptHeight, 0.03, 0.03, "DETALLE") .justify(Justify.RIGHT, 9) .text(0, 7, receiptHeight, 0.03, 0.03, "IMPORTE"); receiptHeight+=0.45; assignTotalConsumeConcepts(command, receipt); receiptHeight+=0.1; assignTotalSupplyConcepts(command, receipt); receiptHeight+=0.1; assignTotalReceiptConcepts(command, receipt); command.justify(Justify.LEFT).box(0.4, boxStartY, 10.05, receiptHeight+=0.15, 0.02); } /** * Muestra los conceptos de TOTAL CONSUMO * @param command * @param receipt */ private static void assignTotalConsumeConcepts(CPCLCommand command, CoopReceipt receipt) { List<PrintConcept> concepts = ConceptManager.getAllTotalConsumeConcepts(receipt.getReceiptId()); int size = concepts.size(); String[] conceptDescs = new String[size]; String[] conceptAmounts = new String[size]; int totalExtraRows = getPrintableWrappedConceptArrays(concepts, conceptDescs, conceptAmounts); double startY = receiptHeight; receiptHeight += (size+totalExtraRows)*SP_FACTOR; command.justify(Justify.LEFT, 6.7) .multilineText(SP_FACTOR, 0, 1, startY, conceptDescs) .line(1, receiptHeight, 6.7, receiptHeight, 0.02) .justify(Justify.RIGHT, 9) .multilineText(SP_FACTOR, 0, 7.4, startY, conceptAmounts) .justify(Justify.LEFT) .line(7.4, receiptHeight, 9, receiptHeight, 0.02); } /** * Muestra los conceptos de TOTAL SUMINISTRO * @param command * @param receipt */ private static void assignTotalSupplyConcepts(CPCLCommand command, CoopReceipt receipt) { List<PrintConcept> concepts = 
ConceptManager.getAllTotalSupplyConcepts(receipt.getReceiptId()); int size = concepts.size(); String[] conceptDescs = new String[size]; String[] conceptAmounts = new String[size]; int totalExtraRows = getPrintableWrappedConceptArrays(concepts, conceptDescs, conceptAmounts); double startY = receiptHeight; receiptHeight += (size+totalExtraRows)*SP_FACTOR; command.justify(Justify.LEFT, 6.7) .multilineText(SP_FACTOR, 0, 1, startY, conceptDescs) .line(1, receiptHeight, 6.7, receiptHeight, 0.02) .justify(Justify.RIGHT, 9) .multilineText(SP_FACTOR, 0, 7.4, startY, conceptAmounts) .justify(Justify.LEFT) .line(7.4, receiptHeight, 9, receiptHeight, 0.02); } /** * Muestra los conceptos de TOTAL FACTURA * @param command * @param receipt */ private static void assignTotalReceiptConcepts(CPCLCommand command, CoopReceipt receipt) { List<PrintConcept> concepts = ConceptManager.getAllTotalReceiptConcepts(receipt); int size = concepts.size(); String[] conceptDescs = new String[size]; String[] conceptAmounts = new String[size]; int totalExtraRows = getPrintableWrappedConceptArrays(concepts, conceptDescs, conceptAmounts); double startY = receiptHeight; command.justify(Justify.LEFT, 6.7) .multilineText(SP_FACTOR, 0, 1, receiptHeight, conceptDescs[0]) .justify(Justify.RIGHT, 9) .multilineText(SP_FACTOR, 0, 7.4, receiptHeight, conceptAmounts[0]); double spacing = (conceptDescs[0].split("\r\n").length)*SP_FACTOR; command.justify(Justify.LEFT, 6.7) .setBold(0.03).setSpacing(0.03) .multilineText(SP_FACTOR, 0, 1, receiptHeight+=spacing, conceptDescs[1]) .justify(Justify.RIGHT, 9) .multilineText(SP_FACTOR, 0, 7.4, receiptHeight, conceptAmounts[1]) .setBold(0).setSpacing(0); spacing = (conceptDescs[1].split("\r\n").length)*SP_FACTOR; receiptHeight+=spacing; assignFineBonusConcepts(command, receipt); command.justify(Justify.LEFT, 6.7) .multilineText(SP_FACTOR, 0, 1, receiptHeight, conceptDescs[2]) .justify(Justify.RIGHT, 9) .multilineText(SP_FACTOR, 0, 7.4, receiptHeight, conceptAmounts[2]); 
spacing = (conceptDescs[2].split("\r\n").length)*SP_FACTOR; receiptHeight = startY+((size+totalExtraRows)*SP_FACTOR)+spacing; } /** * Muestra la lista de conceptos de devoluciones y de bonificaciones multas * @param command * @param receipt */ private static void assignFineBonusConcepts(CPCLCommand command, CoopReceipt receipt) { List<PrintConcept> fineBonusConcepts = Concept.getFineBonusConcepts(receipt.getReceiptId()); int size = fineBonusConcepts.size(); if(size>0) { String[] conceptDescs = new String[size]; String[] conceptAmounts = new String[size]; int totalExtraRows = getPrintableWrappedConceptArrays(fineBonusConcepts, conceptDescs, conceptAmounts); command.justify(Justify.LEFT, 6.7) .multilineText(SP_FACTOR, 0, 1, receiptHeight, conceptDescs) .justify(Justify.RIGHT, 9) .multilineText(SP_FACTOR, 0, 7.4, receiptHeight, conceptAmounts); receiptHeight += (size+totalExtraRows)*SP_FACTOR; } } /** * Asigna la informacin del final de la factura * @param command * @param receipt */ private static void assignFooterData(CPCLCommand command, CoopReceipt receipt, long internalControlCode) { String selectedMsg = wrapFooterContent( ParameterSettingsManager.getParameter(isNewFormat?ParamKey.NEW_MSG:ParamKey.OLD_MSG).getStringValue()); String enterpriseMsg = wrapFooterContent( ParameterSettingsManager.getParameter(ParamKey.ENTERPRISE_MSG).getStringValue()); String literal = wrapLiteral(receipt.getLiteral()+" Bolivianos"); String authDesc = wrapFooterContent(receipt.getAuthorizationDescription()); command.justify(Justify.LEFT, 8) .setFont("TAHOMA8P.CPF") .text(0, 0.6, receiptHeight+=0.15, 0.03, 0.03, "Son:") .multilineText(SP_FACTOR, 0, 1.4, receiptHeight, literal); int spaces = (literal.split("\r\n").length); command.multilineText(SP_FACTOR, 0, 0.6, receiptHeight+=((SP_FACTOR*spaces)+0.1), ("CDIGO DE CONTROL: "+receipt.getControlCode()), ("FECHA LMITE DE EMISIN: "+receipt.getAuthExpirationDate().toString("dd/MM/yyyy"))) .text(0, 7.3, receiptHeight, 0.03, 0.03, 
"CAJA/"+SessionManager.getLoggedCashdeskNumber()+":"+internalControlCode) .justify(Justify.CENTER, 7.3) .multilineText(SP_FACTOR, 0, 0.6, receiptHeight+=((SP_FACTOR*2)+0.05), enterpriseMsg); spaces = (enterpriseMsg.split("\r\n").length); command.multilineText(SP_FACTOR, 0, 0.6, receiptHeight+=((SP_FACTOR*spaces)+0.3), selectedMsg); spaces = (selectedMsg.split("\r\n").length); if(isNewFormat) command.multilineText(SP_FACTOR, 0, 0.6, receiptHeight+=((SP_FACTOR*spaces)+0.2), authDesc); receiptHeight+=(isNewFormat?(authDesc.split("\r\n").length*SP_FACTOR):(SP_FACTOR*spaces)); generateQR(command, receipt); command.justify(Justify.CENTER).image(0, receiptHeight+=0.3, PrinterImagesManager.FOOTER_IMAGE_IN_PRINTER_NAME); receiptHeight+=5.6; } /** * Genera el cdigo QR de la factura * @param command * @param receipt */ private static void generateQR(CPCLCommand command, CoopReceipt receipt) { String textToCode = ELFEC_NIT+"|"+receipt.getReceiptNumber()+"|"+ receipt.getAuthorizationNumber()+"|"+receipt.getIssueDate().toString("dd/MM/yyyy")+"|"+ AmountsCounter.formatBigDecimal(receipt.getTotalAmount())+"|"+ AmountsCounter.formatBigDecimal(Concept.getSubjectToTaxCreditConcept(receipt.getReceiptId()).getAmount())+"|"+ receipt.getControlCode()+"|"+receipt.getNIT()+"|0|0|"+ AmountsCounter.formatBigDecimal(Concept.getNotSubjectToTaxCreditConcept(receipt.getReceiptId()).getAmount())+"|0"; command.QR(5, 7.5, receiptHeight-2.5, QRQuality.M, textToCode); } /** * Wrapea la cadena del literal tomando en cuenta los 7.3 de limite que se tiene * subsecuente * @param literal * @return la cadena con lineas de salto respentando el LITERAL_WRAP_LIMIT */ private static String wrapLiteral(String literal) { return WordUtils.wrap(literal, LITERAL_WRAP_LIMIT).replace("\n", "\r\n"); } /** * Wrapea la cadena del footer tomando en cuenta los 7.3 de limite que se tiene * subsecuente * @param footerMsg * @return la cadena con lineas de salto respentando el WRAP_LIMIT */ private static String 
wrapFooterContent(String footerMsg) { return WordUtils.wrap(footerMsg, FOOTER_WRAP_LIMIT).replace("\n", "\r\n"); } /** * Si es necesario parte la descripcion del concepto en pedazos para que se impriman de forma * subsecuente * @param conceptDesc * @return la cadena con lineas de salto respentando el CONCEPT_WRAP_LIMIT */ private static String wrapConcept(String conceptDesc) { return WordUtils.wrap(conceptDesc, CONCEPT_WRAP_LIMIT).replace("\n", "\r\n"); } /** * Si es necesario parte el nombre en pedazos para que se impriman de forma * subsecuente * @param name * @return la cadena con lineas de salto respentando el WRAP_LIMIT */ private static String wrapName(String name) { return WordUtils.wrap(name, WRAP_LIMIT-9).replace("\n", "\r\n"); } /** * Si es necesario parte la direccin en pedazos para que se impriman de forma * subsecuente * @param fullAddress * @return la cadena con lineas de salto respentando el WRAP_LIMIT */ private static String wrapAddress(String fullAddress) { return WordUtils.wrap(fullAddress, WRAP_LIMIT).replace("\n", "\r\n"); } /** * Arma los arrays imprimibles tomando en cuenta el wrap del texto del tamao limite * @param concepts la lista de conceptos de la que se quieren armar sus arrays imprimibles * @param conceptDescs el array de descripciones con las filas formateadas * @param conceptAmounts el array de importes con las filas formateadas * @return la cantidad de filas extra que se aumentaron */ private static int getPrintableWrappedConceptArrays(List<PrintConcept> concepts, String[] conceptDescs, String[] conceptAmounts) { int size = concepts.size(); int totalExtraRows = 0, extraRows; for (int i = 0; i < size; i++) { conceptDescs[i] = wrapConcept(concepts.get(i).getDescription()); extraRows = conceptDescs[i].split("\r\n").length-1; totalExtraRows+=extraRows; conceptAmounts[i] = AmountsCounter.formatBigDecimal(concepts.get(i).getAmount()); for (int j = 0; j < extraRows; j++) { conceptAmounts[i] = conceptAmounts[i]+"\r\n "; } } return 
totalExtraRows; } }
src/com/elfec/cobranza/business_logic/printer/ReceiptGenerator.java
package com.elfec.cobranza.business_logic.printer; import java.util.List; import java.util.Locale; import org.apache.commons.lang.WordUtils; import org.joda.time.DateTime; import org.joda.time.Days; import com.elfec.cobranza.business_logic.ConceptManager; import com.elfec.cobranza.business_logic.PrinterImagesManager; import com.elfec.cobranza.business_logic.SessionManager; import com.elfec.cobranza.helpers.text_format.AccountFormatter; import com.elfec.cobranza.helpers.utils.AmountsCounter; import com.elfec.cobranza.model.Category; import com.elfec.cobranza.model.Concept; import com.elfec.cobranza.model.CoopReceipt; import com.elfec.cobranza.model.Supply; import com.elfec.cobranza.model.SupplyStatus; import com.elfec.cobranza.model.printer.CPCLCommand; import com.elfec.cobranza.model.printer.CPCLCommand.Justify; import com.elfec.cobranza.model.printer.CPCLCommand.QRQuality; import com.elfec.cobranza.model.printer.CPCLCommand.Unit; import com.elfec.cobranza.model.printer.PrintConcept; import com.elfec.cobranza.settings.ParameterSettingsManager; import com.elfec.cobranza.settings.ParameterSettingsManager.ParamKey; /** * Clase que se encarga de generar el comando de impresin de una factura * @author drodriguez * */ public class ReceiptGenerator { /** * Define el espacio entre lineas de textos multilinea, en CM */ private static final double SP_FACTOR = 0.37; /** * Define el tamao mximo de caracteres que puede ocupar una lnea de texto */ private static final int WRAP_LIMIT = 30; /** * El limite de tamao de un concepto */ private static final int CONCEPT_WRAP_LIMIT = 34; /** * Define el tamao mximo de caracteres que puede ocupar una lnea de texto del literal de la factura */ private static final int LITERAL_WRAP_LIMIT = 66; /** * Define el tamao mximo de caracteres que puede ocupar una lnea de texto del footer */ private static final int FOOTER_WRAP_LIMIT = 42; /** * El NIT de Elfec */ private static final String ELFEC_NIT = "1023213028"; /** * El espacio extra en la 
informacin de la factura */ private static double rcptDataExtraSpacing; /** * El tamao de la factura en cm */ private static double receiptHeight; /** * Booleano que indica si es que se debe usar el nuevo formato */ private static boolean isNewFormat; /** * Genera el comando cpcl de impresin del recibo * @param receipt * @return */ public static CPCLCommand generateCommand(CoopReceipt receipt, long internalControlCode) { rcptDataExtraSpacing = 0; receiptHeight = 0; isNewFormat = (Days.daysBetween( ParameterSettingsManager.getParameter(ParamKey.SFV_DATE).getDateTimeValue(), DateTime.now()).getDays()>=0); CPCLCommand command = new CPCLCommand(200, 400, 11.5).inUnit(Unit.IN_CENTIMETERS ); assignHeaderData(command, receipt); assignReceiptData(command, receipt); assignReceiptDetails(command, receipt); assignFooterData(command, receipt, internalControlCode); command.setLabelHeight(receiptHeight+0.8); command.print(); return command; } /** * Asigna la informacin de la factura a la cabecera en el comando de la impresora * @param command */ private static void assignHeaderData(CPCLCommand command, CoopReceipt receipt) { command.justify(Justify.CENTER) .image(0, receiptHeight, PrinterImagesManager.HEADER_IMAGE_IN_PRINTER_NAME) .text("TAHOMA15.CPF", 0, 0, receiptHeight+=4.1, 0.049, 0.076, "FACTURA ORIGINAL"); if(!isNewFormat) command.text("TAHOMA8P.CPF", 0, 0, receiptHeight+=0.75, receipt.getAuthorizationDescription()); double boxStartY = receiptHeight += (isNewFormat?0.75:SP_FACTOR); command.setFont("TAHOMA11.CPF") .multilineText(0.44, 0, 0, receiptHeight+=0.15, "NIT: "+ELFEC_NIT, "FACTURA No.: "+receipt.getReceiptNumber(), "AUTORIZACIN: "+receipt.getAuthorizationNumber()) .setFont("TAHOMA8P.CPF") .text( 0, 0, receiptHeight+=1.5, 0.025, 0.025, "Actividad Econmica:") .text( 0, 0, receiptHeight+=0.35, "Venta de Energa Elctrica") .justify(Justify.LEFT) .box(0.4, boxStartY, 10.05, receiptHeight+=0.55, 0.02); } /** * Asigna la informacin de la factura al segundo sector de la 
factura en el comando de la impresora * @param command */ private static void assignReceiptData(CPCLCommand command, CoopReceipt receipt) { double boxStartY = receiptHeight-0.02; double startY = receiptHeight+=0.15; double extraSP = assignReceiptRightData(command, receipt, startY); assignReceiptLeftData(command, receipt); receiptHeight = Math.max((startY+extraSP), (receiptHeight+0.15))+rcptDataExtraSpacing; command.justify(Justify.LEFT) .box(0.4, boxStartY, 10.05, receiptHeight, 0.02); } /** * Asigna la informacin de la columna izquierda de la factura al segundo sector de la factura en el comando de la impresora * @param command */ private static void assignReceiptLeftData(CPCLCommand command, CoopReceipt receipt) { String clientName = wrapName(Supply.findSupplyByNUSOrAccount(receipt.getSupplyId(), receipt.getSupplyNumber()).getClientName()); String clientAddress = wrapAddress("DIRECCIN: "+receipt.getClientAddress()); double extraSpacing = ((clientName.split("\r\n").length-1)*SP_FACTOR); command.justify(Justify.LEFT, 3) .setFont("TAHOMA8P.CPF") .text(0, 0.6, receiptHeight, 0.03, 0.03, "FECHA EMISIN:") .text(0, 3.3, receiptHeight, receipt.getIssueDate().toString("dd/MM/yyyy")) .text("TAHOMA11.CPF", 0, 0.6, receiptHeight+=0.35, 0.035, 0.035, "NUS:") .text("TAHOMA11.CPF", 0, 1.6, receiptHeight, 0.035, 0.035, ""+receipt.getSupplyId()) .text(0, 0.6, receiptHeight+=0.5, 0.03, 0.03, "CUENTA:") .text(0, 2, receiptHeight, AccountFormatter.formatAccountNumber(receipt.getSupplyNumber())) .text(0, 0.6, receiptHeight+=0.35, 0.03, 0.03, "NOMBRE:") .multilineText(SP_FACTOR, 0, 2.05, receiptHeight, clientName) .multilineText(SP_FACTOR, 0, 0.6, receiptHeight+=(0.35+extraSpacing), "NIT/CI: "+receipt.getNIT(), clientAddress, "CATEGORA: "+Category.getFullCategoryDesc(receipt.getCategoryId()), "MEDIDOR: "+receipt.getMeterNumber()); extraSpacing = (extraSpacing + ((clientAddress.split("\r\n").length-1)*SP_FACTOR))-(2*SP_FACTOR); rcptDataExtraSpacing = extraSpacing>0?extraSpacing:0; } 
/** * Asigna la informacin de la columna derecha de la factura al * segundo sector de la factura en el comando de la impresora * @param command * @return extraSpacing */ private static double assignReceiptRightData(CPCLCommand command, CoopReceipt receipt, double startY) { SupplyStatus powerSupplyStatus = receipt.getPowerSupplyStatus(); DateTime period = new DateTime(receipt.getYear(), receipt.getPeriodNumber(),1,0,0); int daysPastDue = Days.daysBetween(receipt.getExpirationDate(), DateTime.now()).getDays(); String readings = getReadings(receipt.getSupplyStatusSet().getSupplyStatusList()); double extraSpacing = (readings.split("\r\n").length+8)*SP_FACTOR; command.justify(Justify.LEFT, 4.6) .setFont("TAHOMA8P.CPF") .multilineText(SP_FACTOR, 0, 5.6, startY, "CONSUMO (kWh): "+receipt.getSupplyStatusSet().getBilledConsume(), "POTENCIA: "+(powerSupplyStatus==null?0:powerSupplyStatus.getBilledConsume()), "PERIODO: "+period.toString("MMM/yyyy").toUpperCase(Locale.getDefault()), "DE: "+receipt.getSupplyStatusSet().getLastReadingDate().toString("dd/MM/yyyy") +" A: "+receipt.getSupplyStatusSet().getDate().toString("dd/MM/yyyy"), readings, "FECHA PAGO: "+(DateTime.now().toString("dd/MM/yyyy HH:mm")), "VENCIMIENTO: "+receipt.getExpirationDate().toString("dd/MM/yyyy"), "DIAS MOROSIDAD: "+(daysPastDue<0?0:daysPastDue), "PRXIMA EMISIN: "+receipt.getIssueDate().plusDays(33).toString("dd/MM/yyyy")); return extraSpacing; } /** * Obtiene las lecturas anteriores y actuales de los medidores del suministro de la factura * @param supplyStatuses * @return lecturas anteriores y actuales */ private static String getReadings(List<SupplyStatus> supplyStatuses) { StringBuilder str = new StringBuilder(); int count = 0; int size = supplyStatuses.size(); for (SupplyStatus supplyStatus : supplyStatuses) { count++; str.append("LECTURA ANTERIOR: ").append(supplyStatus.getLastReading()).append("\r\n") .append("LECTURA ACTUAL: ").append(supplyStatus.getReading()); if(count<size) str.append("\r\n"); } 
return str.toString(); } /** * Asigna los conceptos y sus importes de la factura * @param command * @param receipt */ private static void assignReceiptDetails(CPCLCommand command, CoopReceipt receipt) { double boxStartY = receiptHeight-0.02; receiptHeight+=0.15; command.justify(Justify.LEFT, 6.7) .setFont("TAHOMA8P.CPF") .text(0, 1, receiptHeight, 0.03, 0.03, "DETALLE") .justify(Justify.RIGHT, 9) .text(0, 7, receiptHeight, 0.03, 0.03, "IMPORTE"); receiptHeight+=0.45; assignTotalConsumeConcepts(command, receipt); receiptHeight+=0.1; assignTotalSupplyConcepts(command, receipt); receiptHeight+=0.1; assignTotalReceiptConcepts(command, receipt); command.justify(Justify.LEFT).box(0.4, boxStartY, 10.05, receiptHeight+=0.15, 0.02); } /** * Muestra los conceptos de TOTAL CONSUMO * @param command * @param receipt */ private static void assignTotalConsumeConcepts(CPCLCommand command, CoopReceipt receipt) { List<PrintConcept> concepts = ConceptManager.getAllTotalConsumeConcepts(receipt.getReceiptId()); int size = concepts.size(); String[] conceptDescs = new String[size]; String[] conceptAmounts = new String[size]; int totalExtraRows = getPrintableWrappedConceptArrays(concepts, conceptDescs, conceptAmounts); double startY = receiptHeight; receiptHeight += (size+totalExtraRows)*SP_FACTOR; command.justify(Justify.LEFT, 6.7) .multilineText(SP_FACTOR, 0, 1, startY, conceptDescs) .line(1, receiptHeight, 6.7, receiptHeight, 0.02) .justify(Justify.RIGHT, 9) .multilineText(SP_FACTOR, 0, 7.4, startY, conceptAmounts) .justify(Justify.LEFT) .line(7.4, receiptHeight, 9, receiptHeight, 0.02); } /** * Muestra los conceptos de TOTAL SUMINISTRO * @param command * @param receipt */ private static void assignTotalSupplyConcepts(CPCLCommand command, CoopReceipt receipt) { List<PrintConcept> concepts = ConceptManager.getAllTotalSupplyConcepts(receipt.getReceiptId()); int size = concepts.size(); String[] conceptDescs = new String[size]; String[] conceptAmounts = new String[size]; int totalExtraRows 
= getPrintableWrappedConceptArrays(concepts, conceptDescs, conceptAmounts); double startY = receiptHeight; receiptHeight += (size+totalExtraRows)*SP_FACTOR; command.justify(Justify.LEFT, 6.7) .multilineText(SP_FACTOR, 0, 1, startY, conceptDescs) .line(1, receiptHeight, 6.7, receiptHeight, 0.02) .justify(Justify.RIGHT, 9) .multilineText(SP_FACTOR, 0, 7.4, startY, conceptAmounts) .justify(Justify.LEFT) .line(7.4, receiptHeight, 9, receiptHeight, 0.02); } /** * Muestra los conceptos de TOTAL FACTURA * @param command * @param receipt */ private static void assignTotalReceiptConcepts(CPCLCommand command, CoopReceipt receipt) { List<PrintConcept> concepts = ConceptManager.getAllTotalReceiptConcepts(receipt); int size = concepts.size(); String[] conceptDescs = new String[size]; String[] conceptAmounts = new String[size]; int totalExtraRows = getPrintableWrappedConceptArrays(concepts, conceptDescs, conceptAmounts); double startY = receiptHeight; command.justify(Justify.LEFT, 6.7) .multilineText(SP_FACTOR, 0, 1, receiptHeight, conceptDescs[0]) .justify(Justify.RIGHT, 9) .multilineText(SP_FACTOR, 0, 7.4, receiptHeight, conceptAmounts[0]); double spacing = (conceptDescs[0].split("\r\n").length)*SP_FACTOR; command.justify(Justify.LEFT, 6.7) .setBold(0.03).setSpacing(0.03) .multilineText(SP_FACTOR, 0, 1, receiptHeight+=spacing, conceptDescs[1]) .justify(Justify.RIGHT, 9) .multilineText(SP_FACTOR, 0, 7.4, receiptHeight, conceptAmounts[1]) .setBold(0).setSpacing(0); spacing = (conceptDescs[1].split("\r\n").length)*SP_FACTOR; receiptHeight+=spacing; assignFineBonusConcepts(command, receipt); command.justify(Justify.LEFT, 6.7) .multilineText(SP_FACTOR, 0, 1, receiptHeight, conceptDescs[2]) .justify(Justify.RIGHT, 9) .multilineText(SP_FACTOR, 0, 7.4, receiptHeight, conceptAmounts[2]); spacing = (conceptDescs[2].split("\r\n").length)*SP_FACTOR; receiptHeight = startY+((size+totalExtraRows)*SP_FACTOR)+spacing; } /** * Muestra la lista de conceptos de devoluciones y de bonificaciones 
multas * @param command * @param receipt */ private static void assignFineBonusConcepts(CPCLCommand command, CoopReceipt receipt) { List<PrintConcept> fineBonusConcepts = Concept.getFineBonusConcepts(receipt.getReceiptId()); int size = fineBonusConcepts.size(); if(size>0) { String[] conceptDescs = new String[size]; String[] conceptAmounts = new String[size]; int totalExtraRows = getPrintableWrappedConceptArrays(fineBonusConcepts, conceptDescs, conceptAmounts); command.justify(Justify.LEFT, 6.7) .multilineText(SP_FACTOR, 0, 1, receiptHeight, conceptDescs) .justify(Justify.RIGHT, 9) .multilineText(SP_FACTOR, 0, 7.4, receiptHeight, conceptAmounts); receiptHeight += (size+totalExtraRows)*SP_FACTOR; } } /** * Asigna la informacin del final de la factura * @param command * @param receipt */ private static void assignFooterData(CPCLCommand command, CoopReceipt receipt, long internalControlCode) { String selectedMsg = wrapFooterContent( ParameterSettingsManager.getParameter(isNewFormat?ParamKey.NEW_MSG:ParamKey.OLD_MSG).getStringValue()); String enterpriseMsg = wrapFooterContent( ParameterSettingsManager.getParameter(ParamKey.ENTERPRISE_MSG).getStringValue()); String literal = wrapLiteral(receipt.getLiteral()+" Bolivianos"); String authDesc = wrapFooterContent(receipt.getAuthorizationDescription()); command.justify(Justify.LEFT, 8) .setFont("TAHOMA8P.CPF") .text(0, 0.6, receiptHeight+=0.15, 0.03, 0.03, "Son:") .multilineText(SP_FACTOR, 0, 1.4, receiptHeight, literal); int spaces = (literal.split("\r\n").length); command.multilineText(SP_FACTOR, 0, 0.6, receiptHeight+=((SP_FACTOR*spaces)+0.1), ("CDIGO DE CONTROL: "+receipt.getControlCode()), ("FECHA LMITE DE EMISIN: "+receipt.getAuthExpirationDate().toString("dd/MM/yyyy"))) .text(0, 7.3, receiptHeight, 0.03, 0.03, "CAJA/"+SessionManager.getLoggedCashdeskNumber()+":"+internalControlCode) .justify(Justify.CENTER, 7.3) .multilineText(SP_FACTOR, 0, 0.6, receiptHeight+=((SP_FACTOR*2)+0.05), enterpriseMsg); spaces = 
(enterpriseMsg.split("\r\n").length); command.multilineText(SP_FACTOR, 0, 0.6, receiptHeight+=((SP_FACTOR*spaces)+0.3), selectedMsg); spaces = (selectedMsg.split("\r\n").length); if(isNewFormat) command.multilineText(SP_FACTOR, 0, 0.6, receiptHeight+=((SP_FACTOR*spaces)+0.2), authDesc); receiptHeight+=(isNewFormat?(authDesc.split("\r\n").length*SP_FACTOR):(SP_FACTOR*spaces)); generateQR(command, receipt); command.justify(Justify.CENTER).image(0, receiptHeight+=0.3, PrinterImagesManager.FOOTER_IMAGE_IN_PRINTER_NAME); receiptHeight+=5.6; } /** * Genera el cdigo QR de la factura * @param command * @param receipt */ private static void generateQR(CPCLCommand command, CoopReceipt receipt) { String textToCode = ELFEC_NIT+"|"+receipt.getReceiptNumber()+"|"+ receipt.getAuthorizationNumber()+"|"+receipt.getIssueDate().toString("dd/MM/yyyy")+"|"+ AmountsCounter.formatBigDecimal(receipt.getTotalAmount())+"|"+ AmountsCounter.formatBigDecimal(Concept.getSubjectToTaxCreditConcept(receipt.getReceiptId()).getAmount())+"|"+ receipt.getControlCode()+"|"+receipt.getNIT()+"|0|0|"+ AmountsCounter.formatBigDecimal(Concept.getNotSubjectToTaxCreditConcept(receipt.getReceiptId()).getAmount())+"|0"; command.QR(5, 7.5, receiptHeight-2.5, QRQuality.M, textToCode); } /** * Wrapea la cadena del literal tomando en cuenta los 7.3 de limite que se tiene * subsecuente * @param literal * @return la cadena con lineas de salto respentando el LITERAL_WRAP_LIMIT */ private static String wrapLiteral(String literal) { return WordUtils.wrap(literal, LITERAL_WRAP_LIMIT).replace("\n", "\r\n"); } /** * Wrapea la cadena del footer tomando en cuenta los 7.3 de limite que se tiene * subsecuente * @param footerMsg * @return la cadena con lineas de salto respentando el WRAP_LIMIT */ private static String wrapFooterContent(String footerMsg) { return WordUtils.wrap(footerMsg, FOOTER_WRAP_LIMIT).replace("\n", "\r\n"); } /** * Si es necesario parte la descripcion del concepto en pedazos para que se impriman de forma * 
subsecuente * @param conceptDesc * @return la cadena con lineas de salto respentando el CONCEPT_WRAP_LIMIT */ private static String wrapConcept(String conceptDesc) { return WordUtils.wrap(conceptDesc, CONCEPT_WRAP_LIMIT).replace("\n", "\r\n"); } /** * Si es necesario parte el nombre en pedazos para que se impriman de forma * subsecuente * @param name * @return la cadena con lineas de salto respentando el WRAP_LIMIT */ private static String wrapName(String name) { return WordUtils.wrap(name, WRAP_LIMIT-9).replace("\n", "\r\n"); } /** * Si es necesario parte la direccin en pedazos para que se impriman de forma * subsecuente * @param fullAddress * @return la cadena con lineas de salto respentando el WRAP_LIMIT */ private static String wrapAddress(String fullAddress) { return WordUtils.wrap(fullAddress, WRAP_LIMIT).replace("\n", "\r\n"); } /** * Arma los arrays imprimibles tomando en cuenta el wrap del texto del tamao limite * @param concepts la lista de conceptos de la que se quieren armar sus arrays imprimibles * @param conceptDescs el array de descripciones con las filas formateadas * @param conceptAmounts el array de importes con las filas formateadas * @return la cantidad de filas extra que se aumentaron */ private static int getPrintableWrappedConceptArrays(List<PrintConcept> concepts, String[] conceptDescs, String[] conceptAmounts) { int size = concepts.size(); int totalExtraRows = 0, extraRows; for (int i = 0; i < size; i++) { conceptDescs[i] = wrapConcept(concepts.get(i).getDescription()); extraRows = conceptDescs[i].split("\r\n").length-1; totalExtraRows+=extraRows; conceptAmounts[i] = AmountsCounter.formatBigDecimal(concepts.get(i).getAmount()); for (int j = 0; j < extraRows; j++) { conceptAmounts[i] = conceptAmounts[i]+"\r\n "; } } return totalExtraRows; } }
Cambiada la fecha que se muestra en las facturas a la misma fecha con la que se guardó
src/com/elfec/cobranza/business_logic/printer/ReceiptGenerator.java
Cambiada la fecha que se muestra en las facturas a la misma fecha con la que se guardó
<ide><path>rc/com/elfec/cobranza/business_logic/printer/ReceiptGenerator.java <ide> import com.elfec.cobranza.helpers.text_format.AccountFormatter; <ide> import com.elfec.cobranza.helpers.utils.AmountsCounter; <ide> import com.elfec.cobranza.model.Category; <add>import com.elfec.cobranza.model.CollectionPayment; <ide> import com.elfec.cobranza.model.Concept; <ide> import com.elfec.cobranza.model.CoopReceipt; <ide> import com.elfec.cobranza.model.Supply; <ide> int daysPastDue = Days.daysBetween(receipt.getExpirationDate(), DateTime.now()).getDays(); <ide> String readings = getReadings(receipt.getSupplyStatusSet().getSupplyStatusList()); <ide> double extraSpacing = (readings.split("\r\n").length+8)*SP_FACTOR; <add> CollectionPayment payment = receipt.getActiveCollectionPayment(); <ide> <ide> command.justify(Justify.LEFT, 4.6) <ide> .setFont("TAHOMA8P.CPF") <ide> "DE: "+receipt.getSupplyStatusSet().getLastReadingDate().toString("dd/MM/yyyy") <ide> +" A: "+receipt.getSupplyStatusSet().getDate().toString("dd/MM/yyyy"), <ide> readings, <del> "FECHA PAGO: "+(DateTime.now().toString("dd/MM/yyyy HH:mm")), <add> "FECHA PAGO: "+((payment==null?DateTime.now(): payment.getPaymentDate()).toString("dd/MM/yyyy HH:mm")), <ide> "VENCIMIENTO: "+receipt.getExpirationDate().toString("dd/MM/yyyy"), <ide> "DIAS MOROSIDAD: "+(daysPastDue<0?0:daysPastDue), <ide> "PRXIMA EMISIN: "+receipt.getIssueDate().plusDays(33).toString("dd/MM/yyyy"));
Java
agpl-3.0
59b461e57068f047566a0e1f12dc29a40a5671ac
0
PaulKh/scale-proactive,mnip91/programming-multiactivities,ow2-proactive/programming,acontes/programming,jrochas/scale-proactive,PaulKh/scale-proactive,ow2-proactive/programming,paraita/programming,ow2-proactive/programming,acontes/programming,acontes/programming,fviale/programming,acontes/programming,jrochas/scale-proactive,paraita/programming,lpellegr/programming,fviale/programming,acontes/scheduling,fviale/programming,paraita/programming,paraita/programming,lpellegr/programming,ow2-proactive/programming,lpellegr/programming,jrochas/scale-proactive,acontes/scheduling,mnip91/proactive-component-monitoring,PaulKh/scale-proactive,mnip91/proactive-component-monitoring,mnip91/programming-multiactivities,paraita/programming,mnip91/programming-multiactivities,acontes/programming,lpellegr/programming,ow2-proactive/programming,mnip91/programming-multiactivities,mnip91/proactive-component-monitoring,PaulKh/scale-proactive,jrochas/scale-proactive,fviale/programming,acontes/programming,ow2-proactive/programming,acontes/scheduling,mnip91/programming-multiactivities,lpellegr/programming,PaulKh/scale-proactive,acontes/scheduling,acontes/scheduling,PaulKh/scale-proactive,paraita/programming,mnip91/proactive-component-monitoring,acontes/programming,jrochas/scale-proactive,fviale/programming,mnip91/programming-multiactivities,acontes/scheduling,PaulKh/scale-proactive,fviale/programming,mnip91/proactive-component-monitoring,acontes/scheduling,jrochas/scale-proactive,lpellegr/programming,mnip91/proactive-component-monitoring,jrochas/scale-proactive
/*
 * ################################################################
 *
 * ProActive: The Java(TM) library for Parallel, Distributed,
 *            Concurrent computing with Security and Mobility
 *
 * Copyright (C) 1997-2002 INRIA/University of Nice-Sophia Antipolis
 * Contact: [email protected]
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
 * USA
 *
 * Initial developer(s): The ProActive Team
 * http://www.inria.fr/oasis/ProActive/contacts.html
 * Contributor(s):
 *
 * ################################################################
 */
package org.objectweb.proactive.core.descriptor.xml;

import org.objectweb.proactive.core.descriptor.data.ProActiveDescriptor;
import org.objectweb.proactive.core.descriptor.data.VirtualMachine;
import org.objectweb.proactive.core.descriptor.data.VirtualNode;
import org.objectweb.proactive.core.descriptor.data.VirtualNodeImpl;
import org.objectweb.proactive.core.descriptor.data.VirtualNodeLookup;
import org.objectweb.proactive.core.util.UrlBuilder;
import org.objectweb.proactive.core.xml.handler.BasicUnmarshaller;
import org.objectweb.proactive.core.xml.handler.CollectionUnmarshaller;
import org.objectweb.proactive.core.xml.handler.PassiveCompositeUnmarshaller;
import org.objectweb.proactive.core.xml.handler.UnmarshallerHandler;
import org.objectweb.proactive.core.xml.io.Attributes;


/**
 * Receives SAX-style deployment events for the "deployment" section of a
 * ProActive XML descriptor and populates the given {@link ProActiveDescriptor}
 * with virtual nodes, virtual machines, mappings, registrations and lookups.
 *
 * Each supported XML tag is delegated to a nested handler registered in the
 * constructor; the registration order there defines which tags this handler
 * understands.
 *
 * @author       ProActive Team
 * @version      1.0,  2002/09/20
 * @since        ProActive 0.9.3
 */
class DeploymentHandler extends PassiveCompositeUnmarshaller
    implements ProActiveDescriptorConstants {
    // The descriptor being populated; shared by all nested handlers.
    private ProActiveDescriptor proActiveDescriptor;

    //
    //  ----- PRIVATE MEMBERS -----------------------------------------------------------------------------------
    //
    //
    //  ----- CONSTRUCTORS -----------------------------------------------------------------------------------
    //

    /**
     * Registers one nested handler per deployment tag. MAPPING and JVMS are
     * wrapper tags, so their children (MAP, JVM) are registered on an
     * intermediate composite unmarshaller.
     *
     * @param proActiveDescriptor the descriptor to populate
     */
    public DeploymentHandler(ProActiveDescriptor proActiveDescriptor) {
        super(false);
        this.proActiveDescriptor = proActiveDescriptor;
        this.addHandler(REGISTER_TAG, new RegisterHandler());
        this.addHandler(LOOKUP_TAG, new LookupHandler());
        {
            PassiveCompositeUnmarshaller ch = new PassiveCompositeUnmarshaller();
            ch.addHandler(MAP_TAG, new MapHandler());
            this.addHandler(MAPPING_TAG, ch);
        }
        {
            PassiveCompositeUnmarshaller ch = new PassiveCompositeUnmarshaller();
            ch.addHandler(JVM_TAG, new JVMHandler());
            this.addHandler(JVMS_TAG, ch);
        }
    }

    //
    //  ----- PUBLIC METHODS -----------------------------------------------------------------------------------
    //
    //
    //  -- implements UnmarshallerHandler ------------------------------------------------------
    //
    //
    //  ----- PRIVATE METHODS -----------------------------------------------------------------------------------
    //
    //
    //  ----- INNER CLASSES -----------------------------------------------------------------------------------
    //

    /**
     * Handles a "register" tag: marks a virtual node for registration under
     * the given (or default) communication protocol.
     */
    private class RegisterHandler extends BasicUnmarshaller {
        private RegisterHandler() {
        }

        /**
         * Reads the virtualNode/protocol attributes and records the
         * registration protocol on the virtual node.
         *
         * @throws org.xml.sax.SAXException if the virtualNode attribute is missing
         */
        public void startContextElement(String name, Attributes attributes)
            throws org.xml.sax.SAXException {
            String vn = attributes.getValue("virtualNode");
            if (!checkNonEmpty(vn)) {
                throw new org.xml.sax.SAXException(
                    "register Tag without any virtualnode defined");
            }
            String protocol = attributes.getValue("protocol");
            // Fall back to the JVM-wide default protocol when none is given.
            if (!checkNonEmpty(protocol)) {
                protocol = System.getProperty("proactive.communication.protocol");
            }
            protocol = UrlBuilder.checkProtocol(protocol);
            VirtualNodeImpl vnImpl = (VirtualNodeImpl) proActiveDescriptor.createVirtualNode(vn,
                    false);
            vnImpl.setRegistrationProtocol(protocol);
        }
    }

    /**
     * Handles a "lookup" tag: creates a lookup virtual node and records the
     * URL, protocol and port where it must be found.
     */
    private class LookupHandler extends BasicUnmarshaller {
        private LookupHandler() {
        }

        /**
         * Reads virtualNode/protocol/host/port attributes, builds the lookup
         * URL and stores the lookup information on the virtual node.
         *
         * @throws org.xml.sax.SAXException if virtualNode or protocol is
         *         missing, if host is missing for rmi, or if a port is given
         *         for a jini lookup
         */
        public void startContextElement(String name, Attributes attributes)
            throws org.xml.sax.SAXException {
            String vnLookup = attributes.getValue("virtualNode");
            if (!checkNonEmpty(vnLookup)) {
                throw new org.xml.sax.SAXException(
                    "lookup Tag without any virtualnode defined");
            }
            String protocol = attributes.getValue("protocol");
            if (!checkNonEmpty(protocol)) {
                throw new org.xml.sax.SAXException(
                    "lookup Tag without any protocol defined");
            }
            String host = attributes.getValue("host");
            // rmi lookups cannot be resolved without an explicit host.
            if (!checkNonEmpty(host) && protocol.equals("rmi")) {
                throw new org.xml.sax.SAXException(
                    "within a lookup tag attribute host must be defined for rmi protocol");
            }
            protocol = UrlBuilder.checkProtocol(protocol);
            String url = UrlBuilder.buildUrl(host, vnLookup, protocol);
            VirtualNodeLookup vn = (VirtualNodeLookup) proActiveDescriptor.createVirtualNode(vnLookup,
                    true);
            String port = attributes.getValue("port");
            if (checkNonEmpty(port)) {
                if (protocol.equals("jini:")) {
                    throw new org.xml.sax.SAXException(
                        "For a jini lookup, no port number should be specified");
                }
                url = UrlBuilder.buildUrl(host, vnLookup, protocol,
                        new Integer(port).intValue());
                vn.setLookupInformations(url, protocol, new Integer(port).intValue());
                //if no port is specified we use 1099 since it is the default port. Even if it is jini
                // the UrlBuilder will not use the port when building the url
            } else {
                vn.setLookupInformations(url, protocol, 1099);
            }
        }
    }

    /**
     * Handles a "map" tag: binds a virtual node to the set of virtual
     * machines (or the current JVM) declared in its nested "jvmSet".
     */
    private class MapHandler extends PassiveCompositeUnmarshaller {
        // Virtual node declared by the current map element; read by JvmSetHandler.
        VirtualNode vn;

        private MapHandler() {
            this.addHandler(JVMSET_TAG, new JvmSetHandler());
        }

        /**
         * Creates (or retrieves) the virtual node named in the map element.
         *
         * @throws org.xml.sax.SAXException if the virtualNode attribute is missing
         */
        public void startContextElement(String name, Attributes attributes)
            throws org.xml.sax.SAXException {
            // create and register a VirtualNode
            String vnName = attributes.getValue("virtualNode");
            if (!checkNonEmpty(vnName)) {
                throw new org.xml.sax.SAXException(
                    "mapping defined without specifying virtual node");
            }
            vn = proActiveDescriptor.createVirtualNode(vnName, false);
        }

        /**
         * When the jvmSet ends, attaches every named virtual machine to the
         * virtual node, tagging each VM with its creator on first use.
         *
         * @throws org.xml.sax.SAXException if several VMs are mapped onto a
         *         virtual node declared "unique"/"unique_singleAO"
         */
        protected void notifyEndActiveHandler(String name,
            UnmarshallerHandler activeHandler) throws org.xml.sax.SAXException {
            if (name.equals(JVMSET_TAG)) {
                String[] vmNames = (String[]) activeHandler.getResultObject();

                //throws an exception if vn has property unique or unique_singleAO and more than one vm are defined
                if ((vmNames.length > 1) && (vn.getProperty() != null) &&
                        (vn.getProperty().equals("unique") ||
                        vn.getProperty().equals("unique_singleAO"))) {
                    throw new org.xml.sax.SAXException(
                        "a set of virtual machine is defined for a virtualNode that is unique");
                }
                if (vmNames.length > 0) {
                    for (int i = 0; i < vmNames.length; i++) {
                        VirtualMachine vm = proActiveDescriptor.createVirtualMachine(vmNames[i]);
                        // Only the first virtual node to use a VM becomes its creator.
                        if (vm.getCreatorId() == null) {
                            vm.setCreatorId(vn.getName());
                        }
                        vn.addVirtualMachine(vm);
                    }
                }
            }
        }

        //
        // -- INNER CLASSES ------------------------------------------------------
        //

        /**
         * Collects the VM names of a "jvmSet" element; a nested "currentJvm"
         * tag is handled immediately (node created on this JVM) instead of
         * being collected.
         */
        private class JvmSetHandler extends CollectionUnmarshaller {
            protected JvmSetHandler() {
                super(String.class);
                this.addHandler(VMNAME_TAG, new VmNameHandler());
                this.addHandler(CURRENTJVM_TAG, new CurrentJvmHandler());
            }

            /**
             * currentJvm is consumed here (creates a node on the local JVM);
             * every other child is delegated to the collection logic.
             */
            protected void notifyEndActiveHandler(String name,
                UnmarshallerHandler activeHandler) throws org.xml.sax.SAXException {
                if (name.equals(CURRENTJVM_TAG)) {
                    String protocol = (String) activeHandler.getResultObject();
                    if (!checkNonEmpty(protocol)) {
                        protocol = System.getProperty(
                                "proactive.communication.protocol");
                    }
                    vn.createNodeOnCurrentJvm(protocol);
                } else {
                    super.notifyEndActiveHandler(name, activeHandler);
                }
            } //end of inner class JvmSetHandler

            /**
             * Reads the "value" attribute of a "vmName" tag and exposes it as
             * this handler's result.
             */
            private class VmNameHandler extends BasicUnmarshaller {
                private VmNameHandler() {
                }

                public void startContextElement(String name, Attributes attributes)
                    throws org.xml.sax.SAXException {
                    String vmName = attributes.getValue("value");
                    if (checkNonEmpty(vmName)) {
                        setResultObject(vmName);
                    } else {
                        throw new org.xml.sax.SAXException(
                            "The name of the Jvm cannot be set to an empty string");
                    }
                }
            } //end of inner class VmNameHandler

            /**
             * Reads the (possibly null) "protocol" attribute of a
             * "currentJvm" tag and exposes it as this handler's result.
             */
            private class CurrentJvmHandler extends BasicUnmarshaller {
                private CurrentJvmHandler() {
                }

                public void startContextElement(String name, Attributes attributes)
                    throws org.xml.sax.SAXException {
                    String protocol = attributes.getValue("protocol");
                    setResultObject(protocol);
                }
            } // end of inner class CurrentJvmHandler
        } // end of inner class JvmSetHandler
    } // end inner class MapHandler

    /**
     * Handles a "jvm" tag: creates the named virtual machine and delegates
     * its acquisition/creation child elements to nested handlers.
     */
    private class JVMHandler extends PassiveCompositeUnmarshaller {
        // VM declared by the current jvm element; used by the nested handlers.
        private VirtualMachine currentVM;

        private JVMHandler() {
            this.addHandler(ACQUISITION_TAG, new AcquisitionHandler());
            this.addHandler(CREATION_PROCESS_TAG, new CreationHandler());
        }

        /**
         * Creates the virtual machine and, when present, records its node count.
         *
         * @throws org.xml.sax.SAXException if the name attribute is missing or
         *         if setting the node count fails with an I/O error
         */
        public void startContextElement(String name, Attributes attributes)
            throws org.xml.sax.SAXException {
            // create and register a VirtualNode
            String vmName = attributes.getValue("name");
            if (!checkNonEmpty(vmName)) {
                throw new org.xml.sax.SAXException(
                    "VirtualMachine defined without name");
            }
            currentVM = proActiveDescriptor.createVirtualMachine(vmName);
            String nodeNumber = attributes.getValue("nodeNumber");
            try {
                if (checkNonEmpty(nodeNumber)) {
                    currentVM.setHostsNumber(nodeNumber);
                }
            } catch (java.io.IOException e) {
                // Surface the I/O failure as a SAX parse error, keeping the cause.
                throw new org.xml.sax.SAXException(e);
            }
        }

        /**
         * Handles an "acquisition" element: registers the referenced service
         * on the current virtual machine.
         */
        private class AcquisitionHandler extends PassiveCompositeUnmarshaller {
            private AcquisitionHandler() {
                this.addHandler(SERVICE_REFERENCE_TAG,
                    new ProcessReferenceHandler());
            }

            /**
             * A null result means the child produced nothing to register.
             */
            protected void notifyEndActiveHandler(String name,
                UnmarshallerHandler activeHandler) throws org.xml.sax.SAXException {
                Object o = activeHandler.getResultObject();
                if (o == null) {
                    return;
                }
                proActiveDescriptor.registerService(currentVM, (String) o);
            }
        } // end inner class AcquisitionHandler

        /**
         * Handles a "creation" element: registers the referenced process id
         * on the current virtual machine.
         */
        private class CreationHandler extends PassiveCompositeUnmarshaller {
            private CreationHandler() {
                this.addHandler(PROCESS_REFERENCE_TAG,
                    new ProcessReferenceHandler());
            }

            /**
             * A String result is a process reference id; anything else
             * (including null) is ignored.
             */
            protected void notifyEndActiveHandler(String name,
                UnmarshallerHandler activeHandler) throws org.xml.sax.SAXException {
                Object o = activeHandler.getResultObject();
                if (o == null) {
                    return;
                }
                if (o instanceof String) {
                    // its an id
                    proActiveDescriptor.registerProcess(currentVM, (String) o);
                }
            }
        } // end inner class CreationHandler
    } // end inner class JVMHandler
}
src/org/objectweb/proactive/core/descriptor/xml/DeploymentHandler.java
/* * ################################################################ * * ProActive: The Java(TM) library for Parallel, Distributed, * Concurrent computing with Security and Mobility * * Copyright (C) 1997-2002 INRIA/University of Nice-Sophia Antipolis * Contact: [email protected] * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA * * Initial developer(s): The ProActive Team * http://www.inria.fr/oasis/ProActive/contacts.html * Contributor(s): * * ################################################################ */ package org.objectweb.proactive.core.descriptor.xml; import org.objectweb.proactive.core.descriptor.data.ProActiveDescriptor; import org.objectweb.proactive.core.descriptor.data.VirtualMachine; import org.objectweb.proactive.core.descriptor.data.VirtualNode; import org.objectweb.proactive.core.descriptor.data.VirtualNodeImpl; import org.objectweb.proactive.core.descriptor.data.VirtualNodeLookup; import org.objectweb.proactive.core.util.UrlBuilder; import org.objectweb.proactive.core.xml.handler.BasicUnmarshaller; import org.objectweb.proactive.core.xml.handler.CollectionUnmarshaller; import org.objectweb.proactive.core.xml.handler.PassiveCompositeUnmarshaller; import org.objectweb.proactive.core.xml.handler.UnmarshallerHandler; import org.objectweb.proactive.core.xml.io.Attributes; /** * This class receives 
deployment events * * @author ProActive Team * @version 1.0, 2002/09/20 * @since ProActive 0.9.3 */ class DeploymentHandler extends PassiveCompositeUnmarshaller implements ProActiveDescriptorConstants { private ProActiveDescriptor proActiveDescriptor; // // ----- PRIVATE MEMBERS ----------------------------------------------------------------------------------- // // // ----- CONSTRUCTORS ----------------------------------------------------------------------------------- // public DeploymentHandler(ProActiveDescriptor proActiveDescriptor) { super(false); this.proActiveDescriptor = proActiveDescriptor; this.addHandler(REGISTER_TAG, new RegisterHandler()); this.addHandler(LOOKUP_TAG, new LookupHandler()); { PassiveCompositeUnmarshaller ch = new PassiveCompositeUnmarshaller(); ch.addHandler(MAP_TAG, new MapHandler()); this.addHandler(MAPPING_TAG, ch); } { PassiveCompositeUnmarshaller ch = new PassiveCompositeUnmarshaller(); ch.addHandler(JVM_TAG, new JVMHandler()); this.addHandler(JVMS_TAG, ch); } } // // ----- PUBLIC METHODS ----------------------------------------------------------------------------------- // // // -- implements UnmarshallerHandler ------------------------------------------------------ // // // ----- PRIVATE METHODS ----------------------------------------------------------------------------------- // // // ----- INNER CLASSES ----------------------------------------------------------------------------------- // private class RegisterHandler extends BasicUnmarshaller { private RegisterHandler() { } public void startContextElement(String name, Attributes attributes) throws org.xml.sax.SAXException { String vn = attributes.getValue("virtualNode"); if (!checkNonEmpty(vn)) { throw new org.xml.sax.SAXException( "register Tag without any virtualnode defined"); } String protocol = attributes.getValue("protocol"); if (!checkNonEmpty(protocol)) { throw new org.xml.sax.SAXException( "lookup Tag without any protocol defined"); } protocol = 
UrlBuilder.checkProtocol(protocol); VirtualNodeImpl vnImpl = (VirtualNodeImpl) proActiveDescriptor.createVirtualNode(vn, false); //VirtualNodeImpl vnImpl= (VirtualNodeImpl)vnStrat.getVirtualNode(); //vnImpl.setRegistrationValue(true); vnImpl.setRegistrationProtocol(protocol); } } private class LookupHandler extends BasicUnmarshaller { private LookupHandler() { } public void startContextElement(String name, Attributes attributes) throws org.xml.sax.SAXException { String vnLookup = attributes.getValue("virtualNode"); if (!checkNonEmpty(vnLookup)) { throw new org.xml.sax.SAXException( "lookup Tag without any virtualnode defined"); } String protocol = attributes.getValue("protocol"); if (!checkNonEmpty(protocol)) { throw new org.xml.sax.SAXException( "lookup Tag without any protocol defined"); } String host = attributes.getValue("host"); if (!checkNonEmpty(host) && protocol.equals("rmi")) { throw new org.xml.sax.SAXException( "within a lookup tag attribute host must be defined for rmi protocol"); } protocol = UrlBuilder.checkProtocol(protocol); String url = UrlBuilder.buildUrl(host, vnLookup, protocol); VirtualNodeLookup vn = (VirtualNodeLookup) proActiveDescriptor.createVirtualNode(vnLookup, true); // vn.setLookupInformations(url,protocol); String port = attributes.getValue("port"); //System.out.println(port); if (checkNonEmpty(port)) { if (protocol.equals("jini:")) { throw new org.xml.sax.SAXException( "For a jini lookup, no port number should be specified"); } url = UrlBuilder.buildUrl(host, vnLookup, protocol, new Integer(port).intValue()); vn.setLookupInformations(url, protocol, new Integer(port).intValue()); //if no port is specified we use 1099 since it is the default port. 
Even if it is jini // the UrlBuilder will not use the port when building the url } else { vn.setLookupInformations(url, protocol, 1099); } } } /** * This class receives map events */ private class MapHandler extends PassiveCompositeUnmarshaller { VirtualNode vn; private MapHandler() { // CollectionUnmarshaller cu = new CollectionUnmarshaller(String.class); // cu.addHandler(VMNAME_TAG, new VmNameHandler()); // this.addHandler(JVMSET_TAG, cu); this.addHandler(JVMSET_TAG, new JvmSetHandler()); } public void startContextElement(String name, Attributes attributes) throws org.xml.sax.SAXException { // create and register a VirtualNode String vnName = attributes.getValue("virtualNode"); if (!checkNonEmpty(vnName)) { throw new org.xml.sax.SAXException( "mapping defined without specifying virtual node"); } vn = proActiveDescriptor.createVirtualNode(vnName, false); } protected void notifyEndActiveHandler(String name, UnmarshallerHandler activeHandler) throws org.xml.sax.SAXException { if (name.equals(JVMSET_TAG)) { String[] vmNames = (String[]) activeHandler.getResultObject(); //throws an exception if vn has property unique or unique_singleAO and more than one vm are defined if ((vmNames.length > 1) && (vn.getProperty() != null) && (vn.getProperty().equals("unique") || vn.getProperty().equals("unique_singleAO"))) { throw new org.xml.sax.SAXException( "a set of virtual machine is defined for a virtualNode that is unique"); } if (vmNames.length > 0) { for (int i = 0; i < vmNames.length; i++) { VirtualMachine vm = proActiveDescriptor.createVirtualMachine(vmNames[i]); if (vm.getCreatorId() == null) { vm.setCreatorId(vn.getName()); } vn.addVirtualMachine(vm); } } } } // // -- INNER CLASSES ------------------------------------------------------ // private class JvmSetHandler extends CollectionUnmarshaller { protected JvmSetHandler() { super(String.class); this.addHandler(VMNAME_TAG, new VmNameHandler()); this.addHandler(CURRENTJVM_TAG, new CurrentJvmHandler()); } protected void 
notifyEndActiveHandler(String name, UnmarshallerHandler activeHandler) throws org.xml.sax.SAXException { if (name.equals(CURRENTJVM_TAG)) { String protocol = (String) activeHandler.getResultObject(); vn.createNodeOnCurrentJvm(protocol); } else { super.notifyEndActiveHandler(name, activeHandler); } } //end of inner class JvmSetHandler private class VmNameHandler extends BasicUnmarshaller { private VmNameHandler() { } public void startContextElement(String name, Attributes attributes) throws org.xml.sax.SAXException { String vmName = attributes.getValue("value"); if (checkNonEmpty(vmName)) { setResultObject(vmName); } else { throw new org.xml.sax.SAXException( "The name of the Jvm cannot be set to an empty string"); } } } //end of inner class VmNameHandler private class CurrentJvmHandler extends BasicUnmarshaller { private CurrentJvmHandler() { } public void startContextElement(String name, Attributes attributes) throws org.xml.sax.SAXException { String protocol = attributes.getValue("protocol"); setResultObject(protocol); } } // end of inner class CurrentJvmHandler } // end of inner class JvmSetHandler } // end inner class MapHandler /** * This class receives jvm events */ private class JVMHandler extends PassiveCompositeUnmarshaller { private VirtualMachine currentVM; private JVMHandler() { this.addHandler(ACQUISITION_TAG, new AcquisitionHandler()); this.addHandler(CREATION_PROCESS_TAG, new CreationHandler()); } public void startContextElement(String name, Attributes attributes) throws org.xml.sax.SAXException { // create and register a VirtualNode String vmName = attributes.getValue("name"); if (!checkNonEmpty(vmName)) { throw new org.xml.sax.SAXException( "VirtualMachine defined without name"); } currentVM = proActiveDescriptor.createVirtualMachine(vmName); String nodeNumber = attributes.getValue("nodeNumber"); try { if (checkNonEmpty(nodeNumber)) { currentVM.setHostsNumber(nodeNumber); } } catch (java.io.IOException e) { throw new org.xml.sax.SAXException(e); } 
} /** * This class receives acquisition events */ private class AcquisitionHandler extends PassiveCompositeUnmarshaller { private AcquisitionHandler() { this.addHandler(SERVICE_REFERENCE_TAG, new ProcessReferenceHandler()); } protected void notifyEndActiveHandler(String name, UnmarshallerHandler activeHandler) throws org.xml.sax.SAXException { Object o = activeHandler.getResultObject(); if (o == null) { return; } proActiveDescriptor.registerService(currentVM, (String)o); } // public void startContextElement(String name, Attributes attributes) // throws org.xml.sax.SAXException { // String runtimeURL = attributes.getValue("url"); // // //String portNumber = attributes.getValue("port"); // if (runtimeURL != null) { // String protocol = UrlBuilder.getProtocol(runtimeURL); // String url = UrlBuilder.removeProtocol(runtimeURL, protocol); // proActiveDescriptor.registerProcess(currentVM, // (String) runtimeURL); // ProActiveRuntime proActiveRuntimeRegistered = null; // try { // proActiveRuntimeRegistered = RuntimeFactory.getRuntime(url, // protocol); // } catch (ProActiveException e) { // e.printStackTrace(); // } // currentVM.setAcquired(true); // currentVM.setRemoteRuntime(proActiveRuntimeRegistered); // //currentVM.setAcquisitionMethod(acquisitionMethod); // } //// if (portNumber != null) { ///// currentVM.setPortNumber(portNumber); //// } // } } // end inner class AcquisitionHandler /** * This class receives acquisition events */ private class CreationHandler extends PassiveCompositeUnmarshaller { private CreationHandler() { this.addHandler(PROCESS_REFERENCE_TAG, new ProcessReferenceHandler()); } protected void notifyEndActiveHandler(String name, UnmarshallerHandler activeHandler) throws org.xml.sax.SAXException { Object o = activeHandler.getResultObject(); if (o == null) { return; } if (o instanceof String) { // its an id proActiveDescriptor.registerProcess(currentVM, (String) o); } } } // end inner class CreationHandler } // end inner class JVMHandler }
optional protocol added git-svn-id: 9146c88ff6d39b48099bf954d15d68f687b3fa69@1826 28e8926c-6b08-0410-baaa-805c5e19b8d6
src/org/objectweb/proactive/core/descriptor/xml/DeploymentHandler.java
optional protocol added
<ide><path>rc/org/objectweb/proactive/core/descriptor/xml/DeploymentHandler.java <ide> } <ide> String protocol = attributes.getValue("protocol"); <ide> if (!checkNonEmpty(protocol)) { <del> throw new org.xml.sax.SAXException( <del> "lookup Tag without any protocol defined"); <add> protocol = System.getProperty("proactive.communication.protocol"); <ide> } <ide> protocol = UrlBuilder.checkProtocol(protocol); <ide> VirtualNodeImpl vnImpl = (VirtualNodeImpl) proActiveDescriptor.createVirtualNode(vn, <ide> throws org.xml.sax.SAXException { <ide> if (name.equals(CURRENTJVM_TAG)) { <ide> String protocol = (String) activeHandler.getResultObject(); <add> if (!checkNonEmpty(protocol)) { <add> protocol = System.getProperty("proactive.communication.protocol"); <add> } <ide> vn.createNodeOnCurrentJvm(protocol); <ide> } else { <ide> super.notifyEndActiveHandler(name, activeHandler); <ide> private class AcquisitionHandler extends PassiveCompositeUnmarshaller { <ide> private AcquisitionHandler() { <ide> this.addHandler(SERVICE_REFERENCE_TAG, <del> new ProcessReferenceHandler()); <del> } <del> <add> new ProcessReferenceHandler()); <add> } <add> <ide> protected void notifyEndActiveHandler(String name, <del> UnmarshallerHandler activeHandler) <del> throws org.xml.sax.SAXException { <del> Object o = activeHandler.getResultObject(); <del> if (o == null) { <del> return; <del> } <del> proActiveDescriptor.registerService(currentVM, (String)o); <del> } <del> <del>// public void startContextElement(String name, Attributes attributes) <del>// throws org.xml.sax.SAXException { <del>// String runtimeURL = attributes.getValue("url"); <del>// <del>// //String portNumber = attributes.getValue("port"); <del>// if (runtimeURL != null) { <del>// String protocol = UrlBuilder.getProtocol(runtimeURL); <del>// String url = UrlBuilder.removeProtocol(runtimeURL, protocol); <del>// proActiveDescriptor.registerProcess(currentVM, <del>// (String) runtimeURL); <del>// ProActiveRuntime 
proActiveRuntimeRegistered = null; <del>// try { <del>// proActiveRuntimeRegistered = RuntimeFactory.getRuntime(url, <del>// protocol); <del>// } catch (ProActiveException e) { <del>// e.printStackTrace(); <del>// } <del>// currentVM.setAcquired(true); <del>// currentVM.setRemoteRuntime(proActiveRuntimeRegistered); <del>// //currentVM.setAcquisitionMethod(acquisitionMethod); <del>// } <del> <del> //// if (portNumber != null) { <del> ///// currentVM.setPortNumber(portNumber); <del> //// } <del> // } <add> UnmarshallerHandler activeHandler) <add> throws org.xml.sax.SAXException { <add> Object o = activeHandler.getResultObject(); <add> if (o == null) { <add> return; <add> } <add> proActiveDescriptor.registerService(currentVM, (String) o); <add> } <add> <add> // public void startContextElement(String name, Attributes attributes) <add> // throws org.xml.sax.SAXException { <add> // String runtimeURL = attributes.getValue("url"); <add> // <add> // //String portNumber = attributes.getValue("port"); <add> // if (runtimeURL != null) { <add> // String protocol = UrlBuilder.getProtocol(runtimeURL); <add> // String url = UrlBuilder.removeProtocol(runtimeURL, protocol); <add> // proActiveDescriptor.registerProcess(currentVM, <add> // (String) runtimeURL); <add> // ProActiveRuntime proActiveRuntimeRegistered = null; <add> // try { <add> // proActiveRuntimeRegistered = RuntimeFactory.getRuntime(url, <add> // protocol); <add> // } catch (ProActiveException e) { <add> // e.printStackTrace(); <add> // } <add> // currentVM.setAcquired(true); <add> // currentVM.setRemoteRuntime(proActiveRuntimeRegistered); <add> // //currentVM.setAcquisitionMethod(acquisitionMethod); <add> // } <add> //// if (portNumber != null) { <add> ///// currentVM.setPortNumber(portNumber); <add> //// } <add> // } <ide> } <ide> <ide> // end inner class AcquisitionHandler
Java
apache-2.0
895975eab3691e4d85028feec859fa20dc2cac16
0
ragerri/opennlp,ragerri/opennlp,ragerri/opennlp,jzonthemtn/opennlp,jzonthemtn/opennlp,apache/opennlp,apache/opennlp,apache/opennlp,jzonthemtn/opennlp
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package opennlp.tools.cmdline.namefind; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.Charset; import opennlp.tools.cmdline.ArgumentParser.OptionalParameter; import opennlp.tools.cmdline.ArgumentParser.ParameterDescription; import opennlp.tools.cmdline.BasicCmdLineTool; import opennlp.tools.cmdline.CmdLineUtil; import opennlp.tools.cmdline.TerminateToolException; import opennlp.tools.dictionary.Dictionary; import opennlp.tools.formats.NameFinderCensus90NameStream; import opennlp.tools.util.ObjectStream; import opennlp.tools.util.StringList; /** * This tool helps create a loadable dictionary for the {@code NameFinder}, * from data collected from US Census data. * <p> * Data for the US Census and names can be found here for the 1990 Census: * <br> * <a href="http://www.census.gov/genealogy/names/names_files.html">www.census.gov</a> */ public class CensusDictionaryCreatorTool extends BasicCmdLineTool { /** * Create a list of expected parameters. 
*/ interface Parameters { @ParameterDescription(valueName = "code") @OptionalParameter(defaultValue = "en") String getLang(); @ParameterDescription(valueName = "charsetName") @OptionalParameter(defaultValue="UTF-8") String getEncoding(); @ParameterDescription(valueName = "censusDict") String getCensusData(); @ParameterDescription(valueName = "dict") String getDict(); } public String getShortDescription() { return "Converts 1990 US Census names into a dictionary"; } public String getHelp() { return getBasicHelp(Parameters.class); } /** * Creates a dictionary. * * @param sampleStream stream of samples. * @return a {@code Dictionary} class containing the name dictionary * built from the input file. * @throws IOException IOException */ public static Dictionary createDictionary(ObjectStream<StringList> sampleStream) throws IOException { Dictionary mNameDictionary = new Dictionary(true); StringList entry; entry = sampleStream.read(); while (entry != null) { if (!mNameDictionary.contains(entry)) { mNameDictionary.put(entry); } entry = sampleStream.read(); } return mNameDictionary; } public void run(String[] args) { Parameters params = validateAndParseParams(args, Parameters.class); File testData = new File(params.getCensusData()); File dictOutFile = new File(params.getDict()); CmdLineUtil.checkInputFile("Name data", testData); CmdLineUtil.checkOutputFile("Dictionary file", dictOutFile); FileInputStream sampleDataIn = CmdLineUtil.openInFile(testData); ObjectStream<StringList> sampleStream = new NameFinderCensus90NameStream(sampleDataIn, Charset.forName(params.getEncoding())); Dictionary mDictionary; try { System.out.println("Creating Dictionary..."); mDictionary = createDictionary(sampleStream); } catch (IOException e) { throw new TerminateToolException(-1, "IO error while reading training data or indexing data: " + e.getMessage(), e); } finally { try { sampleStream.close(); } catch(IOException e) { // sorry this can fail.. 
} } System.out.println("Saving Dictionary..."); try (OutputStream out = new FileOutputStream(dictOutFile)) { mDictionary.serialize(out); } catch (IOException e) { throw new TerminateToolException(-1, "IO error while writing dictionary file: " + e.getMessage(), e); } } }
opennlp-tools/src/main/java/opennlp/tools/cmdline/namefind/CensusDictionaryCreatorTool.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package opennlp.tools.cmdline.namefind; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.Charset; import opennlp.tools.cmdline.ArgumentParser.OptionalParameter; import opennlp.tools.cmdline.ArgumentParser.ParameterDescription; import opennlp.tools.cmdline.BasicCmdLineTool; import opennlp.tools.cmdline.CmdLineUtil; import opennlp.tools.cmdline.TerminateToolException; import opennlp.tools.dictionary.Dictionary; import opennlp.tools.formats.NameFinderCensus90NameStream; import opennlp.tools.util.ObjectStream; import opennlp.tools.util.StringList; /** * This tool helps create a loadable dictionary for the {@code NameFinder}, * from data collected from US Census data. * <p> * Data for the US Census and names can be found here for the 1990 Census: * <br> * <a href="http://www.census.gov/genealogy/names/names_files.html">www.census.gov</a> */ public class CensusDictionaryCreatorTool extends BasicCmdLineTool { /** * Create a list of expected parameters. 
*/ interface Parameters { @ParameterDescription(valueName = "code") @OptionalParameter(defaultValue = "en") String getLang(); @ParameterDescription(valueName = "charsetName") @OptionalParameter(defaultValue="UTF-8") String getEncoding(); @ParameterDescription(valueName = "censusDict") String getCensusData(); @ParameterDescription(valueName = "dict") String getDict(); } public String getShortDescription() { return "Converts 1990 US Census names into a dictionary"; } public String getHelp() { return getBasicHelp(Parameters.class); } /** * Creates a dictionary. * * @param sampleStream stream of samples. * @return a {@code Dictionary} class containing the name dictionary * built from the input file. * @throws IOException IOException */ public static Dictionary createDictionary(ObjectStream<StringList> sampleStream) throws IOException { Dictionary mNameDictionary = new Dictionary(true); StringList entry; entry = sampleStream.read(); while (entry != null) { if (!mNameDictionary.contains(entry)) { mNameDictionary.put(entry); } entry = sampleStream.read(); } return mNameDictionary; } public void run(String[] args) { Parameters params = validateAndParseParams(args, Parameters.class); File testData = new File(params.getCensusData()); File dictOutFile = new File(params.getDict()); CmdLineUtil.checkInputFile("Name data", testData); CmdLineUtil.checkOutputFile("Dictionary file", dictOutFile); FileInputStream sampleDataIn = CmdLineUtil.openInFile(testData); ObjectStream<StringList> sampleStream = new NameFinderCensus90NameStream(sampleDataIn, Charset.forName(params.getEncoding())); Dictionary mDictionary; try { System.out.println("Creating Dictionary..."); mDictionary = createDictionary(sampleStream); } catch (IOException e) { throw new TerminateToolException(-1, "IO error while reading training data or indexing data: " + e.getMessage(), e); } finally { try { sampleStream.close(); } catch(IOException e) { // sorry this can fail.. 
} } System.out.println("Saving Dictionary..."); OutputStream out = null; try { out = new FileOutputStream(dictOutFile); mDictionary.serialize(out); } catch (IOException e) { throw new TerminateToolException(-1, "IO error while writing dictionary file: " + e.getMessage(), e); } finally { if (out != null) try { out.close(); } catch (IOException e) { // file might be damaged throw new TerminateToolException(-1, "Attention: Failed to correctly write dictionary:" + e.getMessage(), e); } } } }
Use try with resources to write dictionary See issue OPENNLP-872
opennlp-tools/src/main/java/opennlp/tools/cmdline/namefind/CensusDictionaryCreatorTool.java
Use try with resources to write dictionary
<ide><path>pennlp-tools/src/main/java/opennlp/tools/cmdline/namefind/CensusDictionaryCreatorTool.java <ide> <ide> System.out.println("Saving Dictionary..."); <ide> <del> OutputStream out = null; <del> <del> try { <del> out = new FileOutputStream(dictOutFile); <add> try (OutputStream out = new FileOutputStream(dictOutFile)) { <ide> mDictionary.serialize(out); <ide> } catch (IOException e) { <ide> throw new TerminateToolException(-1, "IO error while writing dictionary file: " <ide> + e.getMessage(), e); <ide> } <del> finally { <del> if (out != null) <del> try { <del> out.close(); <del> } catch (IOException e) { <del> // file might be damaged <del> throw new TerminateToolException(-1, "Attention: Failed to correctly write dictionary:" + <del> e.getMessage(), e); <del> } <del> } <ide> } <ide> }
JavaScript
mit
9e8a3bd0b3a9ad5d884b34b0f4c606dcdbbead43
0
socketio/engine.io-client,Automattic/engine.io-client,BernhardRode/engine.io-client,julianduque/engine.io-client,socketio/engine.io-client,juancancela/engine.io-client,vedmalex/engine.io-client,AnyPresence/engine.io-client,juancarloscancela/engine.io-client,nkzawa/engine.io-client,lwahonen/engine.io-client,enounca/engine.io-client,sanemat/engine.io-client,jskrzypek/engine.io-client,SpainTrain/engine.io-client,gsklee/engine.io-client,socketio/engine.io-client,hellpf/engine.io-client
/** * Module dependencies. */ var util = require('./util') , transports = require('./transports') , Emitter = require('./emitter') , debug = require('debug')('engine-client:socket'); /** * Module exports. */ module.exports = Socket; /** * Socket constructor. * * @param {Object} options * @api public */ function Socket(opts){ if (!(this instanceof Socket)) return new Socket(opts); if ('string' == typeof opts) { var uri = util.parseUri(opts); opts = arguments[1] || {}; opts.host = uri.host; opts.secure = uri.protocol == 'https' || uri.protocol == 'wss'; opts.port = uri.port; } opts = opts || {}; this.secure = null != opts.secure ? opts.secure : (global.location && 'https:' == location.protocol); this.host = opts.host || opts.hostname || (global.location ? location.hostname : 'localhost'); this.port = opts.port || (global.location && location.port ? location.port : (this.secure ? 443 : 80)); this.query = opts.query || {}; this.query.uid = rnd(); this.upgrade = false !== opts.upgrade; this.resource = opts.resource || 'default'; this.path = (opts.path || '/engine.io').replace(/\/$/, ''); this.path += '/' + this.resource + '/'; this.forceJSONP = !!opts.forceJSONP; this.timestampParam = opts.timestampParam || 't'; this.timestampRequests = !!opts.timestampRequests; this.flashPath = opts.flashPath || ''; this.transports = opts.transports || ['polling', 'websocket', 'flashsocket']; this.readyState = ''; this.writeBuffer = []; this.policyPort = opts.policyPort || 843; this.open(); Socket.sockets.push(this); Socket.sockets.evs.emit('add', this); }; /** * Mix in `Emitter`. */ Emitter(Socket.prototype); /** * Static EventEmitter. */ Socket.sockets = []; Socket.sockets.evs = new Emitter; /** * Compat. */ Socket.Socket = Socket; /** * Creates transport of the given type. 
* * @param {String} transport name * @return {Transport} * @api private */ Socket.prototype.createTransport = function (name) { debug('creating transport "%s"', name); var query = clone(this.query); query.transport = name; if (this.id) { query.sid = this.id; } var transport = new transports[name]({ host: this.host , port: this.port , secure: this.secure , path: this.path , query: query , forceJSONP: this.forceJSONP , timestampRequests: this.timestampRequests , timestampParam: this.timestampParam , flashPath: this.flashPath , policyPort: this.policyPort }); return transport; }; function clone (obj) { var o = {}; for (var i in obj) { if (obj.hasOwnProperty(i)) { o[i] = obj[i]; } } return o; } /** * Initializes transport to use and starts probe. * * @api private */ Socket.prototype.open = function () { this.readyState = 'opening'; var transport = this.createTransport(this.transports[0]); transport.open(); this.setTransport(transport); }; /** * Sets the current transport. Disables the existing one (if any). * * @api private */ Socket.prototype.setTransport = function (transport) { var self = this; if (this.transport) { debug('clearing existing transport'); this.transport.removeAllListeners(); } // set up transport this.transport = transport; // set up transport listeners transport .on('drain', function () { self.flush(); }) .on('packet', function (packet) { self.onPacket(packet); }) .on('error', function (e) { self.onError(e); }) .on('close', function () { self.onClose('transport close'); }); }; /** * Probes a transport. 
* * @param {String} transport name * @api private */ Socket.prototype.probe = function (name) { debug('probing transport "%s"', name); var transport = this.createTransport(name, { probe: 1 }) , failed = false , self = this; transport.once('open', function () { if (failed) return; debug('probe transport "%s" opened', name); transport.send([{ type: 'ping', data: 'probe' }]); transport.once('packet', function (msg) { if (failed) return; if ('pong' == msg.type && 'probe' == msg.data) { debug('probe transport "%s" pong', name); self.upgrading = true; self.emit('upgrading', transport); debug('pausing current transport "%s"', self.transport.name); self.transport.pause(function () { if (failed) return; if ('closed' == self.readyState || 'closing' == self.readyState) { return; } debug('changing transport and sending upgrade packet'); transport.removeListener('error', onerror); self.emit('upgrade', transport); self.setTransport(transport); transport.send([{ type: 'upgrade' }]); transport = null; self.upgrading = false; self.flush(); }); } else { debug('probe transport "%s" failed', name); var err = new Error('probe error'); err.transport = transport.name; self.emit('error', err); } }); }); transport.once('error', onerror); function onerror(err) { if (failed) return; // Any callback called by transport should be ignored since now failed = true; var error = new Error('probe error: ' + err); error.transport = transport.name; transport.close(); transport = null; debug('probe transport "%s" failed because of error: %s', name, err); self.emit('error', error); }; transport.open(); this.once('close', function () { if (transport) { debug('socket closed prematurely - aborting probe'); failed = true; transport.close(); transport = null; } }); this.once('upgrading', function (to) { if (transport && to.name != transport.name) { debug('"%s" works - aborting "%s"', to.name, transport.name); transport.close(); transport = null; } }); }; /** * Called when connection is deemed open. 
* * @api public */ Socket.prototype.onOpen = function () { debug('socket open'); this.readyState = 'open'; this.emit('open'); this.onopen && this.onopen.call(this); this.flush(); // we check for `readyState` in case an `open` // listener alreay closed the socket if ('open' == this.readyState && this.upgrade && this.transport.pause) { debug('starting upgrade probes'); for (var i = 0, l = this.upgrades.length; i < l; i++) { this.probe(this.upgrades[i]); } } }; /** * Handles a packet. * * @api private */ Socket.prototype.onPacket = function (packet) { if ('opening' == this.readyState || 'open' == this.readyState) { debug('socket receive: type "%s", data "%s"', packet.type, packet.data); this.emit('packet', packet); // Socket is live - any packet counts this.emit('heartbeat'); switch (packet.type) { case 'open': this.onHandshake(util.parseJSON(packet.data)); break; case 'pong': this.ping(); break; case 'error': var err = new Error('server error'); err.code = packet.data; this.emit('error', err); break; case 'message': this.emit('message', packet.data); var event = { data: packet.data }; event.toString = function () { return packet.data; }; this.onmessage && this.onmessage.call(this, event); break; } } else { debug('packet received with socket readyState "%s"', this.readyState); } }; /** * Called upon handshake completion. * * @param {Object} handshake obj * @api private */ Socket.prototype.onHandshake = function (data) { this.emit('handshake', data); this.id = data.sid; this.transport.query.sid = data.sid; this.upgrades = data.upgrades; this.pingInterval = data.pingInterval; this.pingTimeout = data.pingTimeout; this.onOpen(); this.ping(); // Prolong liveness of socket on heartbeat this.removeListener('heartbeat', this.onHeartbeat); this.on('heartbeat', this.onHeartbeat); }; /** * Resets ping timeout. 
* * @api private */ Socket.prototype.onHeartbeat = function (timeout) { clearTimeout(this.pingTimeoutTimer); var self = this; self.pingTimeoutTimer = setTimeout(function () { if ('closed' == self.readyState) return; self.onClose('ping timeout'); }, timeout || (self.pingInterval + self.pingTimeout)); }; /** * Pings server every `this.pingInterval` and expects response * within `this.pingTimeout` or closes connection. * * @api private */ Socket.prototype.ping = function () { var self = this; clearTimeout(self.pingIntervalTimer); self.pingIntervalTimer = setTimeout(function () { debug('writing ping packet - expecting pong within %sms', self.pingTimeout); self.sendPacket('ping'); self.onHeartbeat(self.pingTimeout); }, self.pingInterval); }; /** * Flush write buffers. * * @api private */ Socket.prototype.flush = function () { if ('closed' != this.readyState && this.transport.writable && !this.upgrading && this.writeBuffer.length) { debug('flushing %d packets in socket', this.writeBuffer.length); this.transport.send(this.writeBuffer); this.writeBuffer = []; } }; /** * Sends a message. * * @param {String} message. * @return {Socket} for chaining. * @api public */ Socket.prototype.write = Socket.prototype.send = function (msg) { this.sendPacket('message', msg); return this; }; /** * Sends a packet. * * @param {String} packet type. * @param {String} data. * @api private */ Socket.prototype.sendPacket = function (type, data) { var packet = { type: type, data: data }; this.emit('packetCreate', packet); this.writeBuffer.push(packet); this.flush(); }; /** * Closes the connection. 
* * @api private */ Socket.prototype.close = function () { if ('opening' == this.readyState || 'open' == this.readyState) { this.onClose('forced close'); debug('socket closing - telling transport to close'); this.transport.close(); this.transport.removeAllListeners(); } return this; }; /** * Called upon transport error * * @api private */ Socket.prototype.onError = function (err) { this.emit('error', err); this.onClose('transport error', err); }; /** * Called upon transport close. * * @api private */ Socket.prototype.onClose = function (reason, desc) { if ('closed' != this.readyState) { debug('socket close with reason: "%s"', reason); clearTimeout(this.pingIntervalTimer); clearTimeout(this.pingTimeoutTimer); this.readyState = 'closed'; this.emit('close', reason, desc); this.onclose && this.onclose.call(this); this.id = null; } }; /** * Generates a random uid. * * @api private */ function rnd () { return String(Math.random()).substr(5) + String(Math.random()).substr(5); }
lib/socket.js
/** * Module dependencies. */ var util = require('./util') , transports = require('./transports') , Emitter = require('./emitter') , debug = require('debug')('engine-client:socket'); /** * Module exports. */ module.exports = Socket; /** * Socket constructor. * * @param {Object} options * @api public */ function Socket(opts){ if (!(this instanceof Socket)) return new Socket(opts); if ('string' == typeof opts) { var uri = util.parseUri(opts); opts = arguments[1] || {}; opts.host = uri.host; opts.secure = uri.protocol == 'https' || uri.protocol == 'wss'; opts.port = uri.port; } opts = opts || {}; this.secure = null != opts.secure ? opts.secure : (global.location && 'https:' == location.protocol); this.host = opts.host || opts.hostname || (global.location ? location.hostname : 'localhost'); this.port = opts.port || (global.location && location.port ? location.port : (this.secure ? 443 : 80)); this.query = opts.query || {}; this.query.uid = rnd(); this.upgrade = false !== opts.upgrade; this.resource = opts.resource || 'default'; this.path = (opts.path || '/engine.io').replace(/\/$/, ''); this.path += '/' + this.resource + '/'; this.forceJSONP = !!opts.forceJSONP; this.timestampParam = opts.timestampParam || 't'; this.timestampRequests = !!opts.timestampRequests; this.flashPath = opts.flashPath || ''; this.transports = opts.transports || ['polling', 'websocket', 'flashsocket']; this.readyState = ''; this.writeBuffer = []; this.policyPort = opts.policyPort || 843; this.open(); Socket.sockets.push(this); Socket.sockets.evs.emit('add', this); }; /** * Mix in `Emitter`. */ Emitter(Socket.prototype); /** * Static EventEmitter. */ Socket.sockets = []; Socket.sockets.evs = new Emitter; /** * Creates transport of the given type. 
* * @param {String} transport name * @return {Transport} * @api private */ Socket.prototype.createTransport = function (name) { debug('creating transport "%s"', name); var query = clone(this.query); query.transport = name; if (this.id) { query.sid = this.id; } var transport = new transports[name]({ host: this.host , port: this.port , secure: this.secure , path: this.path , query: query , forceJSONP: this.forceJSONP , timestampRequests: this.timestampRequests , timestampParam: this.timestampParam , flashPath: this.flashPath , policyPort: this.policyPort }); return transport; }; function clone (obj) { var o = {}; for (var i in obj) { if (obj.hasOwnProperty(i)) { o[i] = obj[i]; } } return o; } /** * Initializes transport to use and starts probe. * * @api private */ Socket.prototype.open = function () { this.readyState = 'opening'; var transport = this.createTransport(this.transports[0]); transport.open(); this.setTransport(transport); }; /** * Sets the current transport. Disables the existing one (if any). * * @api private */ Socket.prototype.setTransport = function (transport) { var self = this; if (this.transport) { debug('clearing existing transport'); this.transport.removeAllListeners(); } // set up transport this.transport = transport; // set up transport listeners transport .on('drain', function () { self.flush(); }) .on('packet', function (packet) { self.onPacket(packet); }) .on('error', function (e) { self.onError(e); }) .on('close', function () { self.onClose('transport close'); }); }; /** * Probes a transport. 
* * @param {String} transport name * @api private */ Socket.prototype.probe = function (name) { debug('probing transport "%s"', name); var transport = this.createTransport(name, { probe: 1 }) , failed = false , self = this; transport.once('open', function () { if (failed) return; debug('probe transport "%s" opened', name); transport.send([{ type: 'ping', data: 'probe' }]); transport.once('packet', function (msg) { if (failed) return; if ('pong' == msg.type && 'probe' == msg.data) { debug('probe transport "%s" pong', name); self.upgrading = true; self.emit('upgrading', transport); debug('pausing current transport "%s"', self.transport.name); self.transport.pause(function () { if (failed) return; if ('closed' == self.readyState || 'closing' == self.readyState) { return; } debug('changing transport and sending upgrade packet'); transport.removeListener('error', onerror); self.emit('upgrade', transport); self.setTransport(transport); transport.send([{ type: 'upgrade' }]); transport = null; self.upgrading = false; self.flush(); }); } else { debug('probe transport "%s" failed', name); var err = new Error('probe error'); err.transport = transport.name; self.emit('error', err); } }); }); transport.once('error', onerror); function onerror(err) { if (failed) return; // Any callback called by transport should be ignored since now failed = true; var error = new Error('probe error: ' + err); error.transport = transport.name; transport.close(); transport = null; debug('probe transport "%s" failed because of error: %s', name, err); self.emit('error', error); }; transport.open(); this.once('close', function () { if (transport) { debug('socket closed prematurely - aborting probe'); failed = true; transport.close(); transport = null; } }); this.once('upgrading', function (to) { if (transport && to.name != transport.name) { debug('"%s" works - aborting "%s"', to.name, transport.name); transport.close(); transport = null; } }); }; /** * Called when connection is deemed open. 
* * @api public */ Socket.prototype.onOpen = function () { debug('socket open'); this.readyState = 'open'; this.emit('open'); this.onopen && this.onopen.call(this); this.flush(); // we check for `readyState` in case an `open` // listener alreay closed the socket if ('open' == this.readyState && this.upgrade && this.transport.pause) { debug('starting upgrade probes'); for (var i = 0, l = this.upgrades.length; i < l; i++) { this.probe(this.upgrades[i]); } } }; /** * Handles a packet. * * @api private */ Socket.prototype.onPacket = function (packet) { if ('opening' == this.readyState || 'open' == this.readyState) { debug('socket receive: type "%s", data "%s"', packet.type, packet.data); this.emit('packet', packet); // Socket is live - any packet counts this.emit('heartbeat'); switch (packet.type) { case 'open': this.onHandshake(util.parseJSON(packet.data)); break; case 'pong': this.ping(); break; case 'error': var err = new Error('server error'); err.code = packet.data; this.emit('error', err); break; case 'message': this.emit('message', packet.data); var event = { data: packet.data }; event.toString = function () { return packet.data; }; this.onmessage && this.onmessage.call(this, event); break; } } else { debug('packet received with socket readyState "%s"', this.readyState); } }; /** * Called upon handshake completion. * * @param {Object} handshake obj * @api private */ Socket.prototype.onHandshake = function (data) { this.emit('handshake', data); this.id = data.sid; this.transport.query.sid = data.sid; this.upgrades = data.upgrades; this.pingInterval = data.pingInterval; this.pingTimeout = data.pingTimeout; this.onOpen(); this.ping(); // Prolong liveness of socket on heartbeat this.removeListener('heartbeat', this.onHeartbeat); this.on('heartbeat', this.onHeartbeat); }; /** * Resets ping timeout. 
* * @api private */ Socket.prototype.onHeartbeat = function (timeout) { clearTimeout(this.pingTimeoutTimer); var self = this; self.pingTimeoutTimer = setTimeout(function () { if ('closed' == self.readyState) return; self.onClose('ping timeout'); }, timeout || (self.pingInterval + self.pingTimeout)); }; /** * Pings server every `this.pingInterval` and expects response * within `this.pingTimeout` or closes connection. * * @api private */ Socket.prototype.ping = function () { var self = this; clearTimeout(self.pingIntervalTimer); self.pingIntervalTimer = setTimeout(function () { debug('writing ping packet - expecting pong within %sms', self.pingTimeout); self.sendPacket('ping'); self.onHeartbeat(self.pingTimeout); }, self.pingInterval); }; /** * Flush write buffers. * * @api private */ Socket.prototype.flush = function () { if ('closed' != this.readyState && this.transport.writable && !this.upgrading && this.writeBuffer.length) { debug('flushing %d packets in socket', this.writeBuffer.length); this.transport.send(this.writeBuffer); this.writeBuffer = []; } }; /** * Sends a message. * * @param {String} message. * @return {Socket} for chaining. * @api public */ Socket.prototype.write = Socket.prototype.send = function (msg) { this.sendPacket('message', msg); return this; }; /** * Sends a packet. * * @param {String} packet type. * @param {String} data. * @api private */ Socket.prototype.sendPacket = function (type, data) { var packet = { type: type, data: data }; this.emit('packetCreate', packet); this.writeBuffer.push(packet); this.flush(); }; /** * Closes the connection. 
* * @api private */ Socket.prototype.close = function () { if ('opening' == this.readyState || 'open' == this.readyState) { this.onClose('forced close'); debug('socket closing - telling transport to close'); this.transport.close(); this.transport.removeAllListeners(); } return this; }; /** * Called upon transport error * * @api private */ Socket.prototype.onError = function (err) { this.emit('error', err); this.onClose('transport error', err); }; /** * Called upon transport close. * * @api private */ Socket.prototype.onClose = function (reason, desc) { if ('closed' != this.readyState) { debug('socket close with reason: "%s"', reason); clearTimeout(this.pingIntervalTimer); clearTimeout(this.pingTimeoutTimer); this.readyState = 'closed'; this.emit('close', reason, desc); this.onclose && this.onclose.call(this); this.id = null; } }; /** * Generates a random uid. * * @api private */ function rnd () { return String(Math.random()).substr(5) + String(Math.random()).substr(5); }
socket: added compatibility with pre-1.0.0
lib/socket.js
socket: added compatibility with pre-1.0.0
<ide><path>ib/socket.js <ide> <ide> Socket.sockets = []; <ide> Socket.sockets.evs = new Emitter; <add> <add>/** <add> * Compat. <add> */ <add> <add>Socket.Socket = Socket; <ide> <ide> /** <ide> * Creates transport of the given type.
JavaScript
agpl-3.0
bd6dec352c6d769442b025146a9683464a5d58df
0
gracile-fr/zotero,gracile-fr/zotero,gracile-fr/zotero
/* ***** BEGIN LICENSE BLOCK ***** Copyright © 2012 Center for History and New Media George Mason University, Fairfax, Virginia, USA http://zotero.org This file is part of Zotero. Zotero is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Zotero is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with Zotero. If not, see <http://www.gnu.org/licenses/>. ***** END LICENSE BLOCK ***** */ Zotero.Translate.ItemSaver = function(libraryID, attachmentMode, forceTagType, document, cookieSandbox, baseURI) { // initialize constants this.newItems = []; this.newCollections = []; this._IDMap = {}; // determine library ID if(libraryID === false) { this._libraryID = false; } else if(libraryID === true || libraryID == undefined) { this._libraryID = null; } else { this._libraryID = libraryID; } // determine whether to save files and attachments if (attachmentMode == Zotero.Translate.ItemSaver.ATTACHMENT_MODE_DOWNLOAD) { this._saveAttachment = this._saveAttachmentDownload; } else if(attachmentMode == Zotero.Translate.ItemSaver.ATTACHMENT_MODE_FILE) { this._saveAttachment = this._saveAttachmentFile; } else { this._saveAttachment = function() {}; } this._saveFiles = !(attachmentMode === 0); // If group filesEditable==false, don't save attachments if (typeof this._libraryID == 'number') { var type = Zotero.Libraries.getType(this._libraryID); switch (type) { case 'group': var groupID = Zotero.Groups.getGroupIDFromLibraryID(this._libraryID); var group = Zotero.Groups.get(groupID); if (!group.filesEditable) { this._saveFiles = false; } break; } } // force tag 
types if requested this._forceTagType = forceTagType; // to set cookies on downloaded files this._cookieSandbox = cookieSandbox; // the URI to which other URIs are assumed to be relative if(typeof baseURI === "object" && baseURI instanceof Components.interfaces.nsIURI) { this._baseURI = baseURI; } else { // try to convert to a URI this._baseURI = null; try { this._baseURI = Components.classes["@mozilla.org/network/io-service;1"]. getService(Components.interfaces.nsIIOService).newURI(baseURI, null, null); } catch(e) {}; } }; Zotero.Translate.ItemSaver.ATTACHMENT_MODE_IGNORE = 0; Zotero.Translate.ItemSaver.ATTACHMENT_MODE_DOWNLOAD = 1; Zotero.Translate.ItemSaver.ATTACHMENT_MODE_FILE = 2; Zotero.Translate.ItemSaver.prototype = { /** * Saves items to Standalone or the server * @param items Items in Zotero.Item.toArray() format * @param {Function} callback A callback to be executed when saving is complete. If saving * succeeded, this callback will be passed true as the first argument and a list of items * saved as the second. If saving failed, the callback will be passed false as the first * argument and an error object as the second * @param {Function} [attachmentCallback] A callback that receives information about attachment * save progress. The callback will be called as attachmentCallback(attachment, false, error) * on failure or attachmentCallback(attachment, progressPercent) periodically during saving. */ "saveItems":function(items, callback, attachmentCallback) { // if no open transaction, open a transaction and add a timer call to close it var openedTransaction = false; if(!Zotero.DB.transactionInProgress()) { Zotero.DB.beginTransaction(); openedTransaction = true; } try { var newItems = []; for each(var item in items) { // Get typeID, defaulting to "webpage" var newItem; var type = (item.itemType ? 
item.itemType : "webpage"); if(type == "note") { // handle notes differently newItem = new Zotero.Item('note'); newItem.libraryID = this._libraryID; if(item.note) newItem.setNote(item.note); var myID = newItem.save(); newItem = Zotero.Items.get(myID); } else { if(type == "attachment") { // handle attachments differently newItem = this._saveAttachment(item, null, attachmentCallback); if(!newItem) continue; var myID = newItem.id; } else { var typeID = Zotero.ItemTypes.getID(type); newItem = new Zotero.Item(typeID); newItem._libraryID = this._libraryID; this._saveFields(item, newItem); // handle creators if(item.creators) { this._saveCreators(item, newItem); } // save item var myID = newItem.save(); newItem = Zotero.Items.get(myID); // handle notes if(item.notes) { this._saveNotes(item, myID); } // handle attachments if(item.attachments) { for(var i=0; i<item.attachments.length; i++) { var newAttachment = this._saveAttachment(item.attachments[i], myID, attachmentCallback); if(typeof newAttachment === "object") { this._saveTags(item.attachments[i], newAttachment); } } } } } if(item.itemID) this._IDMap[item.itemID] = myID; // handle see also this._saveTags(item, newItem); // add to new item list newItem = Zotero.Items.get(myID); newItems.push(newItem); } if(openedTransaction) Zotero.DB.commitTransaction(); callback(true, newItems); } catch(e) { if(openedTransaction) Zotero.DB.rollbackTransaction(); callback(false, e); } }, "saveCollection":function(collection) { var collectionsToProcess = [collection]; var parentIDs = [null]; var topLevelCollection; while(collectionsToProcess.length) { var collection = collectionsToProcess.shift(); var parentID = parentIDs.shift(); var newCollection = Zotero.Collections.add(collection.name, parentID); if(parentID === null) topLevelCollection = newCollection; this.newCollections.push(newCollection.id); var toAdd = []; for(var i=0; i<collection.children.length; i++) { var child = collection.children[i]; if(child.type === "collection") { 
// do recursive processing of collections collectionsToProcess.push(child); parentIDs.push(newCollection.id); } else { // add mapped items to collection if(this._IDMap[child.id]) { toAdd.push(this._IDMap[child.id]); } else { Zotero.debug("Translate: Could not map "+child.id+" to an imported item", 2); } } } if(toAdd.length) { Zotero.debug("Translate: Adding " + toAdd, 5); newCollection.addItems(toAdd); } } return topLevelCollection; }, "_saveAttachmentFile":function(attachment, parentID, attachmentCallback) { Zotero.debug("Translate: Adding attachment", 4); if(!attachment.url && !attachment.path) { let e = "Translate: Ignoring attachment: no path or URL specified"; Zotero.debug(e, 2); attachmentCallback(attachment, false, e); return false; } if (attachment.path) { var url = Zotero.Attachments.cleanAttachmentURI(attachment.path, false); if (url && /^(?:https?|ftp):/.test(url)) { // A web URL. Don't bother parsing it as path below // Some paths may look like URIs though, so don't just test for 'file' // E.g. 
C:\something if (!attachment.url) attachment.url = attachment.path; delete attachment.path; } } let done = false; if (attachment.path) { var file = this._parsePath(attachment.path); if(!file) { let asUrl = Zotero.Attachments.cleanAttachmentURI(attachment.path); if (!attachment.url && !asUrl) { let e = "Translate: Could not parse attachment path <" + attachment.path + ">"; Zotero.debug(e, 2); attachmentCallback(attachment, false, e); return false; } else if (!attachment.url && asUrl) { Zotero.debug("Translate: attachment path looks like a URI: " + attachment.path); attachment.url = asUrl; delete attachment.path; } } else { if (attachment.url) { attachment.linkMode = "imported_url"; var myID = Zotero.Attachments.importSnapshotFromFile(file, attachment.url, attachment.title, attachment.mimeType, attachment.charset, parentID); } else { attachment.linkMode = "imported_file"; var myID = Zotero.Attachments.importFromFile(file, parentID); } attachmentCallback(attachment, 100); done = true; } } if(!done) { let url = Zotero.Attachments.cleanAttachmentURI(attachment.url); if (!url) { let e = "Translate: Invalid attachment.url specified <" + attachment.url + ">"; Zotero.debug(e, 2); attachmentCallback(attachment, false, e); return false; } attachment.url = url; url = Components.classes["@mozilla.org/network/io-service;1"] .getService(Components.interfaces.nsIIOService) .newURI(url, null, null); // This cannot fail, since we check above // see if this is actually a file URL if(url.scheme == "file") { let e = "Translate: Local file attachments cannot be specified in attachment.url"; Zotero.debug(e, 2); attachmentCallback(attachment, false, e); return false; } else if(url.scheme != "http" && url.scheme != "https") { let e = "Translate: " + url.scheme + " protocol is not allowed for attachments from translators."; Zotero.debug(e, 2); attachmentCallback(attachment, false, e); return false; } // At this point, must be a valid HTTP/HTTPS url attachment.linkMode = "linked_file"; try { 
var myID = Zotero.Attachments.linkFromURL(attachment.url, parentID, (attachment.mimeType ? attachment.mimeType : undefined), (attachment.title ? attachment.title : undefined)); } catch(e) { Zotero.debug("Translate: Error adding attachment "+attachment.url, 2); attachmentCallback(attachment, false, e); return false; } Zotero.debug("Translate: Created attachment; id is "+myID, 4); attachmentCallback(attachment, 100); } var newItem = Zotero.Items.get(myID); // save fields attachment.itemType = "attachment"; this._saveFields(attachment, newItem); // add note if necessary if(attachment.note) { newItem.setNote(attachment.note); } newItem.save(); return newItem; }, "_parsePathURI":function(path) { try { var uri = Services.io.newURI(path, "", this._baseURI); } catch(e) { Zotero.debug("Translate: " + path + " is not a valid URI"); return false; } try { var file = uri.QueryInterface(Components.interfaces.nsIFileURL).file; } catch (e) { Zotero.debug("Translate: " + uri.spec + " is not a file URI"); return false; } if(file.path == '/') { Zotero.debug("Translate: " + path + " points to root directory"); return false; } if(!file.exists()) { Zotero.debug("Translate: File at " + file.path + " does not exist"); return false; } return file; }, "_parseAbsolutePath":function(path) { var file = Components.classes["@mozilla.org/file/local;1"]. createInstance(Components.interfaces.nsILocalFile); try { file.initWithPath(path); } catch(e) { Zotero.debug("Translate: Invalid absolute path: " + path); return false; } if(!file.exists()) { Zotero.debug("Translate: File at absolute path " + file.path + " does not exist"); return false; } return file; }, "_parseRelativePath":function(path) { if (!this._baseURI) { Zotero.debug("Translate: Cannot parse as relative path. 
No base URI available."); return false; } var file = this._baseURI.QueryInterface(Components.interfaces.nsIFileURL).file.parent; var splitPath = path.split(/\//g); for(var i=0; i<splitPath.length; i++) { if(splitPath[i] !== "") file.append(splitPath[i]); } if(!file.exists()) { Zotero.debug("Translate: File at " + file.path + " does not exist"); return false; } return file; }, "_parsePath":function(path) { Zotero.debug("Translate: Attempting to parse path " + path); var file; // First, try to parse as absolute path if((/^[a-zA-Z]:[\\\/]|^\\\\/.test(path) && Zotero.isWin) // Paths starting with drive letter or network shares starting with \\ || (path[0] === "/" && !Zotero.isWin)) { // Forward slashes on Windows are not allowed in filenames, so we can // assume they're meant to be backslashes. Backslashes are technically // allowed on Linux, so the reverse cannot be done reliably. var nativePath = Zotero.isWin ? path.replace('/', '\\', 'g') : path; if (file = this._parseAbsolutePath(nativePath)) { Zotero.debug("Translate: Got file "+nativePath+" as absolute path"); return file; } } // Next, try to parse as URI if((file = this._parsePathURI(path))) { Zotero.debug("Translate: Got "+path+" as URI") return file; } else if(path.substr(0, 7) !== "file://") { // If it was a fully qualified file URI, we can give up now // Next, try to parse as relative path, replacing backslashes with slashes if((file = this._parseRelativePath(path.replace(/\\/g, "/")))) { Zotero.debug("Translate: Got file "+path+" as relative path"); return file; } // Next, try to parse as relative path, without replacing backslashes with slashes if((file = this._parseRelativePath(path))) { Zotero.debug("Translate: Got file "+path+" as relative path"); return file; } if(path[0] !== "/") { // Next, try to parse a path with no / as an absolute URI or path if((file = this._parsePathURI("/"+path))) { Zotero.debug("Translate: Got file "+path+" as broken URI"); return file; } if((file = 
this._parseAbsolutePath("/"+path))) { Zotero.debug("Translate: Got file "+path+" as broken absolute path"); return file; } } } // Give up Zotero.debug("Translate: Could not find file "+path) return false; }, "_saveAttachmentDownload":function(attachment, parentID, attachmentCallback) { Zotero.debug("Translate: Adding attachment", 4); if(!attachment.url && !attachment.document) { Zotero.debug("Translate: Not adding attachment: no URL specified", 2); } else { // Determine whether to save an attachment if(attachment.snapshot !== false) { if(attachment.document || (attachment.mimeType && (attachment.mimeType === "text/html" || attachment.mimeType == "application/xhtml+xml"))) { if(!Zotero.Prefs.get("automaticSnapshots")) return; } else { if(!Zotero.Prefs.get("downloadAssociatedFiles")) return; } } var doc = undefined; if(attachment.document) { doc = new XPCNativeWrapper(Zotero.Translate.DOMWrapper.unwrap(attachment.document)); if(!attachment.title) attachment.title = doc.title; } var title = attachment.title || null; if(!title) { // If no title provided, use "Attachment" as title for progress UI (but not for item) attachment.title = Zotero.getString("itemTypes.attachment"); } if(attachment.snapshot === false || !this._saveFiles) { // if snapshot is explicitly set to false, attach as link attachment.linkMode = "linked_url"; let url, mimeType; if(doc) { url = doc.location.href; mimeType = attachment.mimeType ? attachment.mimeType : doc.contentType; } else { url = attachment.url mimeType = attachment.mimeType ? 
attachment.mimeType : undefined; } let cleanURI = Zotero.Attachments.cleanAttachmentURI(url); if (!cleanURI) { let e = "Translate: Invalid attachment URL specified <" + url + ">"; Zotero.debug(e, 2); attachmentCallback(attachment, false, e); return false; } url = Components.classes["@mozilla.org/network/io-service;1"] .getService(Components.interfaces.nsIIOService) .newURI(cleanURI, null, null); // This cannot fail, since we check above // Only HTTP/HTTPS links are allowed if(url.scheme != "http" && url.scheme != "https") { let e = "Translate: " + url.scheme + " protocol is not allowed for attachments from translators."; Zotero.debug(e, 2); attachmentCallback(attachment, false, e); return false; } try { Zotero.Attachments.linkFromURL(cleanURI, parentID, mimeType, title); attachmentCallback(attachment, 100); } catch(e) { Zotero.debug("Translate: Error adding attachment "+attachment.url, 2); attachmentCallback(attachment, false, e); return false; } return true; } else { // if snapshot is not explicitly set to false, retrieve snapshot if(doc) { try { attachment.linkMode = "imported_url"; Zotero.Attachments.importFromDocument(doc, parentID, title, null, function(status, err) { if(status) { attachmentCallback(attachment, 100); } else { attachmentCallback(attachment, false, err); } }, this._libraryID); attachmentCallback(attachment, 0); } catch(e) { Zotero.debug("Translate: Error attaching document", 2); attachmentCallback(attachment, false, e); } return true; // Save attachment if snapshot pref enabled or not HTML // (in which case downloadAssociatedFiles applies) } else { var mimeType = (attachment.mimeType ? 
attachment.mimeType : null); var fileBaseName = Zotero.Attachments.getFileBaseNameFromItem(parentID); try { Zotero.debug('Importing attachment from URL'); attachment.linkMode = "imported_url"; Zotero.Attachments.importFromURL(attachment.url, parentID, title, fileBaseName, null, mimeType, this._libraryID, function(status, err) { // TODO: actually indicate progress during download if(status) { attachmentCallback(attachment, 100); } else { attachmentCallback(attachment, false, err); } }, this._cookieSandbox); attachmentCallback(attachment, 0); } catch(e) { Zotero.debug("Translate: Error adding attachment "+attachment.url, 2); attachmentCallback(attachment, false, e); } return true; } } } return false; }, "_saveFields":function(item, newItem) { // fields that should be handled differently const skipFields = ["note", "notes", "itemID", "attachments", "tags", "seeAlso", "itemType", "complete", "creators"]; var typeID = Zotero.ItemTypes.getID(item.itemType); var fieldID; for(var field in item) { // loop through item fields if(item[field] && skipFields.indexOf(field) === -1 && (fieldID = Zotero.ItemFields.getID(field))) { // if field is in db and shouldn't be skipped // try to map from base field if(Zotero.ItemFields.isBaseField(fieldID)) { fieldID = Zotero.ItemFields.getFieldIDFromTypeAndBase(typeID, fieldID); // Skip mapping if item field already exists var fieldName = Zotero.ItemFields.getName(fieldID); if(fieldName !== field && item[fieldName]) continue; if(fieldID) { Zotero.debug("Translate: Mapping "+field+" to "+fieldName, 5); } } // if field is valid for this type, set field if(fieldID && Zotero.ItemFields.isValidForType(fieldID, typeID)) { newItem.setField(fieldID, item[field]); } else { Zotero.debug("Translate: Discarded field "+field+" for item: field not valid for type "+item.itemType, 3); } } } }, "_saveCreators":function(item, newItem) { var creatorIndex = 0; for(var i=0; i<item.creators.length; i++) { var creator = item.creators[i]; if(!creator.firstName && 
!creator.lastName) {
			// Creators with neither a first nor a last name are skipped entirely
			Zotero.debug("Translate: Silently dropping empty creator");
			continue;
		}
		
		// try to assign correct creator type
		var creatorTypeID = 1;
		if(creator.creatorType) {
			try {
				var creatorTypeID = Zotero.CreatorTypes.getID(creator.creatorType);
			} catch(e) {
				// NOTE(review): `j` is not defined in this scope (the loop index is `i`);
				// this debug message presumably means to reference `i` — confirm.
				Zotero.debug("Translate: Invalid creator type "+creator.creatorType+" for creator index "+j, 2);
			}
		}
		
		// Single-field mode
		if (creator.fieldMode && creator.fieldMode == 1) {
			var fields = {
				lastName: creator.lastName,
				fieldMode: 1
			};
		}
		// Two-field mode
		else {
			var fields = {
				firstName: creator.firstName,
				lastName: creator.lastName
			};
		}
		
		// Reuse an existing creator with identical field data in this library, if any
		var creator = null;
		var creatorDataID = Zotero.Creators.getDataID(fields);
		if(creatorDataID) {
			var linkedCreators = Zotero.Creators.getCreatorsWithData(creatorDataID, this._libraryID);
			if (linkedCreators) {
				// TODO: support identical creators via popup? ugh...
				var creatorID = linkedCreators[0];
				creator = Zotero.Creators.get(creatorID);
			}
		}
		if(!creator) {
			creator = new Zotero.Creator;
			creator.libraryID = this._libraryID;
			creator.setFields(fields);
			var creatorID = creator.save();
		}
		
		newItem.setCreator(creatorIndex++, creator, creatorTypeID);
	}
},

/**
 * Saves child notes for an item. Each entry in item.notes may be a plain
 * string (the note text) or an object with a `note` property plus
 * tag/see-also metadata, which is forwarded to _saveTags.
 */
"_saveNotes":function(item, parentID) {
	for(var i=0; i<item.notes.length; i++) {
		var note = item.notes[i];
		if(!note) continue;
		var myNote = new Zotero.Item('note');
		myNote.libraryID = this._libraryID;
		myNote.setNote(typeof note == "object" ? note.note : note);
		if(parentID) {
			myNote.setSource(parentID);
		}
		var noteID = myNote.save();
		
		if(typeof note == "object") {
			// handle see also
			myNote = Zotero.Items.get(noteID);
			this._saveTags(note, myNote);
		}
	}
},

/**
 * Records the translator itemID -> saved-item-id mapping, attaches
 * see-also relations, and saves user/automatic tags onto newItem.
 */
"_saveTags":function(item, newItem) {
	// add to ID map
	if(item.itemID) {
		this._IDMap[item.itemID] = newItem.id;
	}
	
	// add see alsos
	if(item.seeAlso) {
		for(var i=0; i<item.seeAlso.length; i++) {
			var seeAlso = item.seeAlso[i];
			if(this._IDMap[seeAlso]) {
				newItem.addRelatedItem(this._IDMap[seeAlso]);
			}
		}
		newItem.save();
	}
	
	// if all tags are automatic and automatic tags pref is on, return immediately
	var tagPref = Zotero.Prefs.get("automaticTags");
	if(this._forceTagType == 1 && !tagPref) return;
	
	// add tags
	if(item.tags) {
		var tagsToAdd = {};
		tagsToAdd[0] = []; // user tags
		tagsToAdd[1] = []; // automatic tags
		
		for(var i=0; i<item.tags.length; i++) {
			var tag = item.tags[i];
			
			if(typeof(tag) == "string") {
				// accept strings in tag array as automatic tags, or, if
				// importing, as non-automatic tags
				if(this._forceTagType) {
					tagsToAdd[this._forceTagType].push(tag);
				} else {
					tagsToAdd[0].push(tag);
				}
			} else if(typeof(tag) == "object") {
				// also accept objects
				if(tag.tag || tag.name) {
					if(this._forceTagType) {
						var tagType = this._forceTagType;
					} else if(tag.type) {
						// skip automatic tags during import too (?)
						if(tag.type == 1 && !tagPref) continue;
						var tagType = tag.type;
					} else {
						var tagType = 0;
					}
					tagsToAdd[tagType].push(tag.tag ? tag.tag : tag.name);
				}
			}
		}
		
		// NOTE(review): for...in over an array literal yields the string keys
		// "0" and "1"; this works here because property access coerces, but an
		// explicit numeric loop would be clearer.
		for (var type in [0, 1]) {
			if (tagsToAdd[type].length) {
				newItem.addTags(tagsToAdd[type], type);
			}
		}
	}
}
}

Zotero.Translate.ItemGetter = function() {
	this._itemsLeft = [];
	this._collectionsLeft = null;
	this._exportFileDirectory = null;
	this.legacy = false;
};

Zotero.Translate.ItemGetter.prototype = {
	// Items are sorted ascending by id so export order is deterministic
	"setItems":function(items) {
		this._itemsLeft = items;
		this._itemsLeft.sort(function(a, b) { return a.id - b.id; });
		this.numItems = this._itemsLeft.length;
	},
	
	"setCollection":function(collection, getChildCollections) {
		// get items in this collection
		var haveItems = {};
		this._itemsLeft = collection.getChildItems();
		for each(var item in this._itemsLeft) haveItems[item.id] = true;
		if(!this._itemsLeft) {
			this._itemsLeft = [];
		}
		
		if(getChildCollections) {
			// get child collections
			this._collectionsLeft = Zotero.getCollections(collection.id, true);
			
			// get items in child collections, deduplicated via haveItems
			for each(var collection in this._collectionsLeft) {
				var childItems = collection.getChildItems();
				if(childItems) {
					for each(var item in childItems) {
						if(!haveItems[item.id]) {
							haveItems[item.id] = true;
							// NOTE(review): stray double semicolon below (harmless empty statement)
							this._itemsLeft.push(item);;
						}
					}
				}
			}
		}
		
		this._itemsLeft.sort(function(a, b) { return a.id - b.id; });
		this.numItems = this._itemsLeft.length;
	},
	
	"setAll":function(libraryID, getChildCollections) {
		this._itemsLeft = Zotero.Items.getAll(true, libraryID);
		
		if(getChildCollections) {
			this._collectionsLeft = Zotero.getCollections(null, true, libraryID);
		}
		
		this._itemsLeft.sort(function(a, b) { return a.id - b.id; });
		this.numItems = this._itemsLeft.length;
	},
	
	// Replaces `dir` with a fresh directory of the same name and returns an
	// nsILocalFile pointing at "<dir>/<name>.<extension>" for the export output.
	"exportFiles":function(dir, extension) {
		// generate directory
		this._exportFileDirectory = Components.classes["@mozilla.org/file/local;1"].
			createInstance(Components.interfaces.nsILocalFile);
		this._exportFileDirectory.initWithFile(dir.parent);
		
		// delete this file if it exists
		if(dir.exists()) {
			dir.remove(true);
		}
		
		// get name
		var name = dir.leafName;
		this._exportFileDirectory.append(name);
		
		// create directory
		this._exportFileDirectory.create(Components.interfaces.nsIFile.DIRECTORY_TYPE, 0700);
		
		// generate a new location for the exported file, with the appropriate
		// extension
		var location = Components.classes["@mozilla.org/file/local;1"].
			createInstance(Components.interfaces.nsILocalFile);
		location.initWithFile(this._exportFileDirectory);
		location.append(name+"."+extension);
		
		return location;
	},
	
	/**
	 * Converts an attachment to array format and copies it to the export folder if desired
	 */
	"_attachmentToArray":function(attachment) {
		var attachmentArray = Zotero.Utilities.Internal.itemToExportFormat(attachment, this.legacy);
		var linkMode = attachment.attachmentLinkMode;
		if(linkMode != Zotero.Attachments.LINK_MODE_LINKED_URL) {
			var attachFile = attachment.getFile();
			attachmentArray.localPath = attachFile.path;
			
			if(this._exportFileDirectory) {
				var exportDir = this._exportFileDirectory;
				
				// Add path and filename if not an internet link
				var attachFile = attachment.getFile();
				if(attachFile) {
					attachmentArray.defaultPath = "files/" + attachment.id + "/" + attachFile.leafName;
					attachmentArray.filename = attachFile.leafName;
					
					/**
					 * Copies the attachment file to the specified relative path from the
					 * export directory.
					 * @param {String} attachPath The path to which the file should be exported
					 *    including the filename. If supporting files are included, they will be
					 *    copied as well without any renaming.
					 * @param {Boolean} overwriteExisting Optional - If this is set to false, the
					 *    function will throw an error when exporting a file would require an existing
					 *    file to be overwritten. If true, the file will be silently overwritten.
					 *    defaults to false if not provided.
					 */
					attachmentArray.saveFile = function(attachPath, overwriteExisting) {
						// Ensure a valid path is specified
						if(attachPath === undefined || attachPath == "") {
							throw new Error("ERROR_EMPTY_PATH");
						}
						
						// Set the default value of overwriteExisting if it was not provided
						if (overwriteExisting === undefined) {
							overwriteExisting = false;
						}
						
						// Separate the path into a list of subdirectories and the attachment filename,
						// and initialize the required file objects
						var targetFile = Components.classes["@mozilla.org/file/local;1"].
							createInstance(Components.interfaces.nsILocalFile);
						targetFile.initWithFile(exportDir);
						for each(var dir in attachPath.split("/")) targetFile.append(dir);
						
						// First, check that we have not gone lower than exportDir in the hierarchy
						var parent = targetFile, inExportFileDirectory;
						while((parent = parent.parent)) {
							if(exportDir.equals(parent)) {
								inExportFileDirectory = true;
								break;
							}
						}
						
						if(!inExportFileDirectory) {
							// NOTE(review): "hirarchy" typo below is in a runtime error string;
							// left byte-identical here, flagged for a separate fix.
							throw new Error("Invalid path; attachment cannot be placed above export "+
								"directory in the file hirarchy");
						}
						
						// Create intermediate directories if they don't exist
						parent = targetFile;
						while((parent = parent.parent) && !parent.exists()) {
							parent.create(Components.interfaces.nsIFile.DIRECTORY_TYPE, 0700);
						}
						
						// Delete any existing file if overwriteExisting is set, or throw an exception
						// if it is not
						if(targetFile.exists()) {
							if(overwriteExisting) {
								targetFile.remove(false);
							} else {
								throw new Error("ERROR_FILE_EXISTS " + targetFile.leafName);
							}
						}
						
						var directory = targetFile.parent;
						
						// The only attachments that can have multiple supporting files are imported
						// attachments of mime type text/html (specified in Attachments.getNumFiles())
						if(attachment.attachmentMIMEType == "text/html"
								&& linkMode != Zotero.Attachments.LINK_MODE_LINKED_FILE
								&& Zotero.Attachments.getNumFiles(attachment) > 1) {
							// Attachment is a snapshot with supporting files. Check if any of the
							// supporting files would cause a name conflict, and build a list of transfers
							// that should be performed
							var copySrcs = [];
							var files = attachment.getFile().parent.directoryEntries;
							while (files.hasMoreElements()) {
								// NOTE(review): `file` is assigned without var/let — implicit global
								// in non-strict mode; presumably should be declared locally.
								file = files.getNext();
								file.QueryInterface(Components.interfaces.nsIFile);
								
								// Ignore the main attachment file (has already been checked for name conflict)
								if(attachFile.equals(file)) {
									continue;
								}
								
								// Remove any existing files in the target destination if overwriteExisting
								// is set, or throw an exception if it is not
								var targetSupportFile = targetFile.parent.clone();
								targetSupportFile.append(file.leafName);
								if(targetSupportFile.exists()) {
									if(overwriteExisting) {
										targetSupportFile.remove(false);
									} else {
										throw new Error("ERROR_FILE_EXISTS " + targetSupportFile.leafName);
									}
								}
								copySrcs.push(file.clone());
							}
							
							// No conflicts were detected or all conflicts were resolved, perform the copying
							attachFile.copyTo(directory, targetFile.leafName);
							for(var i = 0; i < copySrcs.length; i++) {
								copySrcs[i].copyTo(directory, copySrcs[i].leafName);
							}
						} else {
							// Attachment is a single file
							// Copy the file to the specified location
							attachFile.copyTo(directory, targetFile.leafName);
						}
						
						attachmentArray.path = targetFile.path;
					};
				}
			}
		}
		
		return attachmentArray;
	},
	
	/**
	 * Retrieves the next available item
	 */
	"nextItem":function() {
		while(this._itemsLeft.length != 0) {
			var returnItem = this._itemsLeft.shift();
			// export file data for single files
			if(returnItem.isAttachment()) {
				// an independent attachment
				var returnItemArray = this._attachmentToArray(returnItem);
				if(returnItemArray) return returnItemArray;
			} else {
				var returnItemArray = Zotero.Utilities.Internal.itemToExportFormat(returnItem, this.legacy);
				
				// get attachments, although only urls will be passed if exportFileData is off
				returnItemArray.attachments = [];
				var attachments = returnItem.getAttachments();
				for each(var attachmentID in attachments) {
					var attachment = Zotero.Items.get(attachmentID);
					var attachmentInfo = this._attachmentToArray(attachment);
					
					if(attachmentInfo) {
						returnItemArray.attachments.push(attachmentInfo);
					}
				}
				
				return returnItemArray;
			}
		}
		return false;
	},
	
	// Returns the next child collection in serialized form, or false when exhausted
	"nextCollection":function() {
		if(!this._collectionsLeft || this._collectionsLeft.length == 0) return false;
		
		var returnItem = this._collectionsLeft.shift();
		var obj = returnItem.serialize(true);
		obj.id = obj.primary.collectionID;
		obj.name = obj.fields.name;
		return obj;
	}
}

Zotero.Translate.ItemGetter.prototype.__defineGetter__("numItemsRemaining", function() { return this._itemsLeft.length });
chrome/content/zotero/xpcom/translation/translate_item.js
/* ***** BEGIN LICENSE BLOCK *****
   Copyright © 2012 Center for History and New Media
   George Mason University, Fairfax, Virginia, USA
   http://zotero.org
   
   This file is part of Zotero.
   
   Zotero is free software: you can redistribute it and/or modify
   it under the terms of the GNU Affero General Public License as published by
   the Free Software Foundation, either version 3 of the License, or
   (at your option) any later version.
   
   Zotero is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU Affero General Public License for more details.
   
   You should have received a copy of the GNU Affero General Public License
   along with Zotero. If not, see <http://www.gnu.org/licenses/>.
   
   ***** END LICENSE BLOCK ***** */

/**
 * Saves translated items into the local Zotero library.
 * @param libraryID false = no library, true/undefined = user library (null), otherwise a numeric library ID
 * @param attachmentMode One of the ATTACHMENT_MODE_* constants below
 * @param forceTagType If set, all tags are saved with this tag type
 * @param document Unused here beyond the signature
 * @param cookieSandbox Cookie sandbox applied to downloaded files
 * @param baseURI nsIURI (or string convertible to one) that relative attachment paths resolve against
 */
Zotero.Translate.ItemSaver = function(libraryID, attachmentMode, forceTagType, document, cookieSandbox, baseURI) {
	// initialize constants
	this.newItems = [];
	this.newCollections = [];
	this._IDMap = {};
	
	// determine library ID
	if(libraryID === false) {
		this._libraryID = false;
	} else if(libraryID === true || libraryID == undefined) {
		this._libraryID = null;
	} else {
		this._libraryID = libraryID;
	}
	
	// determine whether to save files and attachments
	if (attachmentMode == Zotero.Translate.ItemSaver.ATTACHMENT_MODE_DOWNLOAD) {
		this._saveAttachment = this._saveAttachmentDownload;
	} else if(attachmentMode == Zotero.Translate.ItemSaver.ATTACHMENT_MODE_FILE) {
		this._saveAttachment = this._saveAttachmentFile;
	} else {
		this._saveAttachment = function() {};
	}
	
	this._saveFiles = !(attachmentMode === 0);
	
	// If group filesEditable==false, don't save attachments
	if (typeof this._libraryID == 'number') {
		var type = Zotero.Libraries.getType(this._libraryID);
		switch (type) {
			case 'group':
				var groupID = Zotero.Groups.getGroupIDFromLibraryID(this._libraryID);
				var group = Zotero.Groups.get(groupID);
				if (!group.filesEditable) {
					this._saveFiles = false;
				}
				break;
		}
	}
	
	// force tag types if requested
	this._forceTagType = forceTagType;
	// to set cookies on downloaded files
	this._cookieSandbox = cookieSandbox;
	
	// the URI to which other URIs are assumed to be relative
	if(typeof baseURI === "object" && baseURI instanceof Components.interfaces.nsIURI) {
		this._baseURI = baseURI;
	} else {
		// try to convert to a URI; deliberately best-effort — an unparseable
		// baseURI just leaves _baseURI null
		this._baseURI = null;
		try {
			this._baseURI = Components.classes["@mozilla.org/network/io-service;1"].
				getService(Components.interfaces.nsIIOService).newURI(baseURI, null, null);
		} catch(e) {};
	}
};

Zotero.Translate.ItemSaver.ATTACHMENT_MODE_IGNORE = 0;
Zotero.Translate.ItemSaver.ATTACHMENT_MODE_DOWNLOAD = 1;
Zotero.Translate.ItemSaver.ATTACHMENT_MODE_FILE = 2;

Zotero.Translate.ItemSaver.prototype = {
	/**
	 * Saves items to Standalone or the server
	 * @param items Items in Zotero.Item.toArray() format
	 * @param {Function} callback A callback to be executed when saving is complete. If saving
	 *    succeeded, this callback will be passed true as the first argument and a list of items
	 *    saved as the second. If saving failed, the callback will be passed false as the first
	 *    argument and an error object as the second
	 * @param {Function} [attachmentCallback] A callback that receives information about attachment
	 *    save progress. The callback will be called as attachmentCallback(attachment, false, error)
	 *    on failure or attachmentCallback(attachment, progressPercent) periodically during saving.
	 */
	"saveItems":function(items, callback, attachmentCallback) {
		// if no open transaction, open a transaction and add a timer call to close it
		var openedTransaction = false;
		if(!Zotero.DB.transactionInProgress()) {
			Zotero.DB.beginTransaction();
			openedTransaction = true;
		}
		
		try {
			var newItems = [];
			for each(var item in items) {
				// Get typeID, defaulting to "webpage"
				var newItem;
				var type = (item.itemType ? item.itemType : "webpage");
				
				if(type == "note") {
					// handle notes differently
					newItem = new Zotero.Item('note');
					newItem.libraryID = this._libraryID;
					if(item.note) newItem.setNote(item.note);
					var myID = newItem.save();
					newItem = Zotero.Items.get(myID);
				} else {
					if(type == "attachment") {
						// handle attachments differently
						newItem = this._saveAttachment(item, null, attachmentCallback);
						if(!newItem) continue;
						var myID = newItem.id;
					} else {
						var typeID = Zotero.ItemTypes.getID(type);
						newItem = new Zotero.Item(typeID);
						// NOTE(review): assigns the underscore-prefixed _libraryID directly,
						// unlike the `newItem.libraryID = ...` form used for notes above —
						// confirm this bypass of the property setter is intentional.
						newItem._libraryID = this._libraryID;
						
						this._saveFields(item, newItem);
						
						// handle creators
						if(item.creators) {
							this._saveCreators(item, newItem);
						}
						
						// save item
						var myID = newItem.save();
						newItem = Zotero.Items.get(myID);
						
						// handle notes
						if(item.notes) {
							this._saveNotes(item, myID);
						}
						
						// handle attachments
						if(item.attachments) {
							for(var i=0; i<item.attachments.length; i++) {
								var newAttachment = this._saveAttachment(item.attachments[i], myID, attachmentCallback);
								if(typeof newAttachment === "object") {
									this._saveTags(item.attachments[i], newAttachment);
								}
							}
						}
					}
				}
				
				if(item.itemID) this._IDMap[item.itemID] = myID;
				
				// handle see also
				this._saveTags(item, newItem);
				
				// add to new item list
				newItem = Zotero.Items.get(myID);
				newItems.push(newItem);
			}
			
			if(openedTransaction) Zotero.DB.commitTransaction();
			callback(true, newItems);
		} catch(e) {
			if(openedTransaction) Zotero.DB.rollbackTransaction();
			callback(false, e);
		}
	},
	
	// Saves a (possibly nested) collection tree iteratively; items are mapped
	// through _IDMap, so saveItems must have run first. Returns the new
	// top-level collection.
	"saveCollection":function(collection) {
		var collectionsToProcess = [collection];
		var parentIDs = [null];
		var topLevelCollection;
		
		while(collectionsToProcess.length) {
			var collection = collectionsToProcess.shift();
			var parentID = parentIDs.shift();
			
			var newCollection = Zotero.Collections.add(collection.name, parentID);
			if(parentID === null) topLevelCollection = newCollection;
			this.newCollections.push(newCollection.id);
			
			var toAdd = [];
			
			for(var i=0; i<collection.children.length; i++) {
				var child = collection.children[i];
				if(child.type === "collection") {
					// do recursive processing of collections
					collectionsToProcess.push(child);
					parentIDs.push(newCollection.id);
				} else {
					// add mapped items to collection
					if(this._IDMap[child.id]) {
						toAdd.push(this._IDMap[child.id]);
					} else {
						Zotero.debug("Translate: Could not map "+child.id+" to an imported item", 2);
					}
				}
			}
			
			if(toAdd.length) {
				Zotero.debug("Translate: Adding " + toAdd, 5);
				newCollection.addItems(toAdd);
			}
		}
		
		return topLevelCollection;
	},
	
	// Saves an attachment given by local path and/or URL. Returns the new
	// Zotero.Item, or false on failure (failure is also reported through
	// attachmentCallback).
	"_saveAttachmentFile":function(attachment, parentID, attachmentCallback) {
		Zotero.debug("Translate: Adding attachment", 4);
		
		if(!attachment.url && !attachment.path) {
			let e = "Translate: Ignoring attachment: no path or URL specified";
			Zotero.debug(e, 2);
			attachmentCallback(attachment, false, e);
			return false;
		}
		
		if (attachment.path) {
			var url = Zotero.Attachments.cleanAttachmentURI(attachment.path, false);
			if (url && /^(?:https?|ftp):/.test(url)) {
				// A web URL. Don't bother parsing it as path below
				// Some paths may look like URIs though, so don't just test for 'file'
				// E.g. C:\something
				if (!attachment.url) attachment.url = attachment.path;
				delete attachment.path;
			}
		}
		
		let done = false;
		if (attachment.path) {
			var file = this._parsePath(attachment.path);
			if(!file) {
				let asUrl = Zotero.Attachments.cleanAttachmentURI(attachment.path);
				if (!attachment.url && !asUrl) {
					let e = "Translate: Could not parse attachment path <" + attachment.path + ">";
					Zotero.debug(e, 2);
					attachmentCallback(attachment, false, e);
					return false;
				} else if (!attachment.url && asUrl) {
					Zotero.debug("Translate: attachment path looks like a URI: " + attachment.path);
					attachment.url = asUrl;
					delete attachment.path;
				}
			} else {
				if (attachment.url) {
					attachment.linkMode = "imported_url";
					var myID = Zotero.Attachments.importSnapshotFromFile(file,
						attachment.url, attachment.title, attachment.mimeType, attachment.charset,
						parentID);
				} else {
					attachment.linkMode = "imported_file";
					var myID = Zotero.Attachments.importFromFile(file, parentID);
				}
				attachmentCallback(attachment, 100);
				done = true;
			}
		}
		
		if(!done) {
			let url = Zotero.Attachments.cleanAttachmentURI(attachment.url);
			if (!url) {
				let e = "Translate: Invalid attachment.url specified <" + attachment.url + ">";
				Zotero.debug(e, 2);
				attachmentCallback(attachment, false, e);
				return false;
			}
			
			attachment.url = url;
			url = Components.classes["@mozilla.org/network/io-service;1"]
				.getService(Components.interfaces.nsIIOService)
				.newURI(url, null, null); // This cannot fail, since we check above
			
			// see if this is actually a file URL
			if(url.scheme == "file") {
				let e = "Translate: Local file attachments cannot be specified in attachment.url";
				Zotero.debug(e, 2);
				attachmentCallback(attachment, false, e);
				return false;
			} else if(url.scheme != "http" && url.scheme != "https") {
				let e = "Translate: " + url.scheme + " protocol is not allowed for attachments from translators.";
				Zotero.debug(e, 2);
				attachmentCallback(attachment, false, e);
				return false;
			}
			
			// At this point, must be a valid HTTP/HTTPS url
			// NOTE(review): linkFromURL creates a linked-URL attachment, so
			// "linked_url" is presumably intended here rather than "linked_file"
			// — confirm against Zotero.Attachments.
			attachment.linkMode = "linked_file";
			try {
				var myID = Zotero.Attachments.linkFromURL(attachment.url, parentID,
					(attachment.mimeType ? attachment.mimeType : undefined),
					(attachment.title ? attachment.title : undefined));
			} catch(e) {
				Zotero.debug("Translate: Error adding attachment "+attachment.url, 2);
				attachmentCallback(attachment, false, e);
				return false;
			}
			Zotero.debug("Translate: Created attachment; id is "+myID, 4);
			attachmentCallback(attachment, 100);
		}
		
		var newItem = Zotero.Items.get(myID);
		
		// save fields
		attachment.itemType = "attachment";
		this._saveFields(attachment, newItem);
		
		// add note if necessary
		if(attachment.note) {
			newItem.setNote(attachment.note);
		}
		
		newItem.save();
		
		return newItem;
	},
	
	// Resolves `path` as a (possibly relative) file URI against _baseURI.
	// Returns an nsIFile for an existing, non-root file, or false.
	"_parsePathURI":function(path) {
		try {
			var uri = Services.io.newURI(path, "", this._baseURI);
		} catch(e) {
			Zotero.debug("Translate: " + path + " is not a valid URI");
			return false;
		}
		
		try {
			var file = uri.QueryInterface(Components.interfaces.nsIFileURL).file;
		} catch (e) {
			Zotero.debug("Translate: " + uri.spec + " is not a file URI");
			return false;
		}
		
		if(file.path == '/') {
			Zotero.debug("Translate: " + path + " points to root directory");
			return false;
		}
		
		if(!file.exists()) {
			Zotero.debug("Translate: File at " + file.path + " does not exist");
			return false;
		}
		
		return file;
	},
	
	// Resolves `path` as an absolute native path; returns an existing nsIFile or false.
	"_parseAbsolutePath":function(path) {
		var file = Components.classes["@mozilla.org/file/local;1"].
			createInstance(Components.interfaces.nsILocalFile);
		try {
			file.initWithPath(path);
		} catch(e) {
			Zotero.debug("Translate: Invalid absolute path: " + path);
			return false;
		}
		
		if(!file.exists()) {
			Zotero.debug("Translate: File at absolute path " + file.path + " does not exist");
			return false;
		}
		
		return file;
	},
	
	// Resolves `path` relative to the directory containing _baseURI's file;
	// returns an existing nsIFile or false.
	"_parseRelativePath":function(path) {
		if (!this._baseURI) {
			Zotero.debug("Translate: Cannot parse as relative path. No base URI available.");
			return false;
		}
		
		var file = this._baseURI.QueryInterface(Components.interfaces.nsIFileURL).file.parent;
		var splitPath = path.split(/\//g);
		for(var i=0; i<splitPath.length; i++) {
			if(splitPath[i] !== "") file.append(splitPath[i]);
		}
		
		if(!file.exists()) {
			Zotero.debug("Translate: File at " + file.path + " does not exist");
			return false;
		}
		
		return file;
	},
	
	// Tries several interpretations of `path` in order (absolute path, URI,
	// relative path, "broken" variants with a leading slash) and returns the
	// first existing file, or false.
	"_parsePath":function(path) {
		Zotero.debug("Translate: Attempting to parse path " + path);
		
		var file;
		
		// First, try to parse as absolute path
		if((/^[a-zA-Z]:[\\\/]|^\\\\/.test(path) && Zotero.isWin) // Paths starting with drive letter or network shares starting with \\
			|| (path[0] === "/" && !Zotero.isWin)) {
			// Forward slashes on Windows are not allowed in filenames, so we can
			// assume they're meant to be backslashes. Backslashes are technically
			// allowed on Linux, so the reverse cannot be done reliably.
			var nativePath = Zotero.isWin ? path.replace('/', '\\', 'g') : path;
			if (file = this._parseAbsolutePath(nativePath)) {
				Zotero.debug("Translate: Got file "+nativePath+" as absolute path");
				return file;
			}
		}
		
		// Next, try to parse as URI
		if((file = this._parsePathURI(path))) {
			Zotero.debug("Translate: Got "+path+" as URI")
			return file;
		} else if(path.substr(0, 7) !== "file://") {
			// If it was a fully qualified file URI, we can give up now
			
			// Next, try to parse as relative path, replacing backslashes with slashes
			if((file = this._parseRelativePath(path.replace(/\\/g, "/")))) {
				Zotero.debug("Translate: Got file "+path+" as relative path");
				return file;
			}
			
			// Next, try to parse as relative path, without replacing backslashes with slashes
			if((file = this._parseRelativePath(path))) {
				Zotero.debug("Translate: Got file "+path+" as relative path");
				return file;
			}
			
			if(path[0] !== "/") {
				// Next, try to parse a path with no / as an absolute URI or path
				if((file = this._parsePathURI("/"+path))) {
					Zotero.debug("Translate: Got file "+path+" as broken URI");
					return file;
				}
				
				if((file = this._parseAbsolutePath("/"+path))) {
					Zotero.debug("Translate: Got file "+path+" as broken absolute path");
					return file;
				}
			}
		}
		
		// Give up
		Zotero.debug("Translate: Could not find file "+path)
		
		return false;
	},
	
	// Saves an attachment by downloading it (snapshot or associated file),
	// or links it as a URL when snapshots are disabled. Returns true when an
	// attachment save was initiated, false otherwise.
	"_saveAttachmentDownload":function(attachment, parentID, attachmentCallback) {
		Zotero.debug("Translate: Adding attachment", 4);
		
		if(!attachment.url && !attachment.document) {
			Zotero.debug("Translate: Not adding attachment: no URL specified", 2);
		} else {
			// Determine whether to save an attachment
			if(attachment.snapshot !== false) {
				if(attachment.document
						|| (attachment.mimeType &&
							(attachment.mimeType === "text/html" || attachment.mimeType == "application/xhtml+xml"))) {
					if(!Zotero.Prefs.get("automaticSnapshots")) return;
				} else {
					if(!Zotero.Prefs.get("downloadAssociatedFiles")) return;
				}
			}
			
			var doc = undefined;
			if(attachment.document) {
				doc = new XPCNativeWrapper(Zotero.Translate.DOMWrapper.unwrap(attachment.document));
				if(!attachment.title) attachment.title = doc.title;
			}
			var title = attachment.title || null;
			if(!title) {
				// If no title provided, use "Attachment" as title for progress UI (but not for item)
				attachment.title = Zotero.getString("itemTypes.attachment");
			}
			
			if(attachment.snapshot === false || !this._saveFiles) {
				// if snapshot is explicitly set to false, attach as link
				attachment.linkMode = "linked_url";
				let url, mimeType;
				if(doc) {
					url = doc.location.href;
					mimeType = attachment.mimeType ? attachment.mimeType : doc.contentType;
				} else {
					// NOTE(review): missing semicolon below (ASI applies); preserved as-is
					url = attachment.url
					mimeType = attachment.mimeType ? attachment.mimeType : undefined;
				}
				
				let cleanURI = Zotero.Attachments.cleanAttachmentURI(url);
				if (!cleanURI) {
					let e = "Translate: Invalid attachment URL specified <" + url + ">";
					Zotero.debug(e, 2);
					attachmentCallback(attachment, false, e);
					return false;
				}
				url = Components.classes["@mozilla.org/network/io-service;1"]
					.getService(Components.interfaces.nsIIOService)
					.newURI(cleanURI, null, null); // This cannot fail, since we check above
				
				// Only HTTP/HTTPS links are allowed
				if(url.scheme != "http" && url.scheme != "https") {
					let e = "Translate: " + url.scheme + " protocol is not allowed for attachments from translators.";
					Zotero.debug(e, 2);
					attachmentCallback(attachment, false, e);
					return false;
				}
				
				try {
					Zotero.Attachments.linkFromURL(cleanURI, parentID, mimeType, title);
					attachmentCallback(attachment, 100);
				} catch(e) {
					Zotero.debug("Translate: Error adding attachment "+attachment.url, 2);
					attachmentCallback(attachment, false, e);
					return false;
				}
				return true;
			} else {
				// if snapshot is not explicitly set to false, retrieve snapshot
				if(doc) {
					try {
						attachment.linkMode = "imported_url";
						Zotero.Attachments.importFromDocument(doc, parentID, title, null, function(status, err) {
							if(status) {
								attachmentCallback(attachment, 100);
							} else {
								attachmentCallback(attachment, false, err);
							}
						}, this._libraryID);
						attachmentCallback(attachment, 0);
					} catch(e) {
						Zotero.debug("Translate: Error attaching document", 2);
						attachmentCallback(attachment, false, e);
					}
					return true;
				// Save attachment if snapshot pref enabled or not HTML
				// (in which case downloadAssociatedFiles applies)
				} else {
					var mimeType = (attachment.mimeType ? attachment.mimeType : null);
					var fileBaseName = Zotero.Attachments.getFileBaseNameFromItem(parentID);
					try {
						Zotero.debug('Importing attachment from URL');
						attachment.linkMode = "imported_url";
						Zotero.Attachments.importFromURL(attachment.url, parentID, title, fileBaseName, null, mimeType, this._libraryID, function(status, err) {
							// TODO: actually indicate progress during download
							if(status) {
								attachmentCallback(attachment, 100);
							} else {
								attachmentCallback(attachment, false, err);
							}
						}, this._cookieSandbox);
						attachmentCallback(attachment, 0);
					} catch(e) {
						Zotero.debug("Translate: Error adding attachment "+attachment.url, 2);
						attachmentCallback(attachment, false, e);
					}
					return true;
				}
			}
		}
		return false;
	},
	
	// Copies recognized item fields from the translator's item object onto
	// the new Zotero.Item, mapping base fields to type-specific fields.
	"_saveFields":function(item, newItem) {
		// fields that should be handled differently
		const skipFields = ["note", "notes", "itemID", "attachments", "tags", "seeAlso", "itemType", "complete", "creators"];
		
		var typeID = Zotero.ItemTypes.getID(item.itemType);
		var fieldID;
		for(var field in item) {
			// loop through item fields
			if(item[field] && skipFields.indexOf(field) === -1 && (fieldID = Zotero.ItemFields.getID(field))) {
				// if field is in db and shouldn't be skipped
				
				// try to map from base field
				if(Zotero.ItemFields.isBaseField(fieldID)) {
					fieldID = Zotero.ItemFields.getFieldIDFromTypeAndBase(typeID, fieldID);
					
					// Skip mapping if item field already exists
					var fieldName = Zotero.ItemFields.getName(fieldID);
					if(fieldName !== field && item[fieldName]) continue;
					
					if(fieldID) {
						Zotero.debug("Translate: Mapping "+field+" to "+fieldName, 5);
					}
				}
				
				// if field is valid for this type, set field
				if(fieldID && Zotero.ItemFields.isValidForType(fieldID, typeID)) {
					newItem.setField(fieldID, item[field]);
				} else {
					Zotero.debug("Translate: Discarded field "+field+" for item: field not valid for type "+item.itemType, 3);
				}
			}
		}
	},
	
	// Saves creators onto newItem, reusing identical existing creators in
	// this library where possible. Empty creators are dropped.
	"_saveCreators":function(item, newItem) {
		var creatorIndex = 0;
		for(var i=0; i<item.creators.length; i++) {
			var creator = item.creators[i];
			
			if(!creator.firstName && !creator.lastName) {
				Zotero.debug("Translate: Silently dropping empty creator");
				continue;
			}
			
			// try to assign correct creator type
			var creatorTypeID = 1;
			if(creator.creatorType) {
				try {
					var creatorTypeID = Zotero.CreatorTypes.getID(creator.creatorType);
				} catch(e) {
					// NOTE(review): `j` is not defined in this scope (the loop index is `i`);
					// this debug message presumably means to reference `i` — confirm.
					Zotero.debug("Translate: Invalid creator type "+creator.creatorType+" for creator index "+j, 2);
				}
			}
			
			// Single-field mode
			if (creator.fieldMode && creator.fieldMode == 1) {
				var fields = {
					lastName: creator.lastName,
					fieldMode: 1
				};
			}
			// Two-field mode
			else {
				var fields = {
					firstName: creator.firstName,
					lastName: creator.lastName
				};
			}
			
			var creator = null;
			var creatorDataID = Zotero.Creators.getDataID(fields);
			if(creatorDataID) {
				var linkedCreators = Zotero.Creators.getCreatorsWithData(creatorDataID, this._libraryID);
				if (linkedCreators) {
					// TODO: support identical creators via popup? ugh...
					var creatorID = linkedCreators[0];
					creator = Zotero.Creators.get(creatorID);
				}
			}
			if(!creator) {
				creator = new Zotero.Creator;
				creator.libraryID = this._libraryID;
				creator.setFields(fields);
				var creatorID = creator.save();
			}
			
			newItem.setCreator(creatorIndex++, creator, creatorTypeID);
		}
	},
	
	// Saves child notes; entries may be strings or objects with note text
	// plus tag/see-also metadata.
	"_saveNotes":function(item, parentID) {
		for(var i=0; i<item.notes.length; i++) {
			var note = item.notes[i];
			if(!note) continue;
			var myNote = new Zotero.Item('note');
			myNote.libraryID = this._libraryID;
			myNote.setNote(typeof note == "object" ? note.note : note);
			if(parentID) {
				myNote.setSource(parentID);
			}
			var noteID = myNote.save();
			
			if(typeof note == "object") {
				// handle see also
				myNote = Zotero.Items.get(noteID);
				this._saveTags(note, myNote);
			}
		}
	},
	
	// Records the itemID mapping, attaches see-also relations, and saves
	// user/automatic tags onto newItem.
	"_saveTags":function(item, newItem) {
		// add to ID map
		if(item.itemID) {
			this._IDMap[item.itemID] = newItem.id;
		}
		
		// add see alsos
		if(item.seeAlso) {
			for(var i=0; i<item.seeAlso.length; i++) {
				var seeAlso = item.seeAlso[i];
				if(this._IDMap[seeAlso]) {
					newItem.addRelatedItem(this._IDMap[seeAlso]);
				}
			}
			newItem.save();
		}
		
		// if all tags are automatic and automatic tags pref is on, return immediately
		var tagPref = Zotero.Prefs.get("automaticTags");
		if(this._forceTagType == 1 && !tagPref) return;
		
		// add tags
		if(item.tags) {
			var tagsToAdd = {};
			tagsToAdd[0] = []; // user tags
			tagsToAdd[1] = []; // automatic tags
			
			for(var i=0; i<item.tags.length; i++) {
				var tag = item.tags[i];
				
				if(typeof(tag) == "string") {
					// accept strings in tag array as automatic tags, or, if
					// importing, as non-automatic tags
					if(this._forceTagType) {
						tagsToAdd[this._forceTagType].push(tag);
					} else {
						tagsToAdd[0].push(tag);
					}
				} else if(typeof(tag) == "object") {
					// also accept objects
					if(tag.tag || tag.name) {
						if(this._forceTagType) {
							var tagType = this._forceTagType;
						} else if(tag.type) {
							// skip automatic tags during import too (?)
							if(tag.type == 1 && !tagPref) continue;
							var tagType = tag.type;
						} else {
							var tagType = 0;
						}
						tagsToAdd[tagType].push(tag.tag ? tag.tag : tag.name);
					}
				}
			}
			
			// NOTE(review): for...in over an array literal yields the string keys
			// "0" and "1"; this works because property access coerces, but an
			// explicit numeric loop would be clearer.
			for (var type in [0, 1]) {
				if (tagsToAdd[type].length) {
					newItem.addTags(tagsToAdd[type], type);
				}
			}
		}
	}
}

Zotero.Translate.ItemGetter = function() {
	this._itemsLeft = [];
	this._collectionsLeft = null;
	this._exportFileDirectory = null;
	this.legacy = false;
};

Zotero.Translate.ItemGetter.prototype = {
	"setItems":function(items) {
		this._itemsLeft = items;
		this.numItems = this._itemsLeft.length;
	},
	
	"setCollection":function(collection, getChildCollections) {
		// get items in this collection
		var haveItems = {};
		this._itemsLeft = collection.getChildItems();
		for each(var item in this._itemsLeft) haveItems[item.id] = true;
		if(!this._itemsLeft) {
			this._itemsLeft = [];
		}
		
		if(getChildCollections) {
			// get child collections
			this._collectionsLeft = Zotero.getCollections(collection.id, true);
			
			// get items in child collections, deduplicated via haveItems
			for each(var collection in this._collectionsLeft) {
				var childItems = collection.getChildItems();
				if(childItems) {
					for each(var item in childItems) {
						if(!haveItems[item.id]) {
							haveItems[item.id] = true;
							// NOTE(review): stray double semicolon below (harmless empty statement)
							this._itemsLeft.push(item);;
						}
					}
				}
			}
		}
		
		this.numItems = this._itemsLeft.length;
	},
	
	"setAll":function(libraryID, getChildCollections) {
		this._itemsLeft = Zotero.Items.getAll(true, libraryID);
		
		if(getChildCollections) {
			this._collectionsLeft = Zotero.getCollections(null, true, libraryID);
		}
		
		this.numItems = this._itemsLeft.length;
	},
	
	// Replaces `dir` with a fresh directory of the same name and returns an
	// nsILocalFile pointing at "<dir>/<name>.<extension>" for the export output.
	"exportFiles":function(dir, extension) {
		// generate directory
		this._exportFileDirectory = Components.classes["@mozilla.org/file/local;1"].
			createInstance(Components.interfaces.nsILocalFile);
		this._exportFileDirectory.initWithFile(dir.parent);
		
		// delete this file if it exists
		if(dir.exists()) {
			dir.remove(true);
		}
		
		// get name
		var name = dir.leafName;
		this._exportFileDirectory.append(name);
		
		// create directory
		this._exportFileDirectory.create(Components.interfaces.nsIFile.DIRECTORY_TYPE, 0700);
		
		// generate a new location for the exported file, with the appropriate
		// extension
		var location = Components.classes["@mozilla.org/file/local;1"].
			createInstance(Components.interfaces.nsILocalFile);
		location.initWithFile(this._exportFileDirectory);
		location.append(name+"."+extension);
		
		return location;
	},
	
	/**
	 * Converts an attachment to array format and copies it to the export folder if desired
	 */
	"_attachmentToArray":function(attachment) {
		var attachmentArray = Zotero.Utilities.Internal.itemToExportFormat(attachment, this.legacy);
		var linkMode = attachment.attachmentLinkMode;
		if(linkMode != Zotero.Attachments.LINK_MODE_LINKED_URL) {
			var attachFile = attachment.getFile();
			attachmentArray.localPath = attachFile.path;
			
			if(this._exportFileDirectory) {
				var exportDir = this._exportFileDirectory;
				
				// Add path and filename if not an internet link
				var attachFile = attachment.getFile();
				if(attachFile) {
					attachmentArray.defaultPath = "files/" + attachment.id + "/" + attachFile.leafName;
					attachmentArray.filename = attachFile.leafName;
					
					/**
					 * Copies the attachment file to the specified relative path from the
					 * export directory.
					 * @param {String} attachPath The path to which the file should be exported
					 *    including the filename. If supporting files are included, they will be
					 *    copied as well without any renaming.
					 * @param {Boolean} overwriteExisting Optional - If this is set to false, the
					 *    function will throw an error when exporting a file would require an existing
					 *    file to be overwritten. If true, the file will be silently overwritten.
					 *    defaults to false if not provided.
					 */
					attachmentArray.saveFile = function(attachPath, overwriteExisting) {
						// Ensure a valid path is specified
						if(attachPath === undefined || attachPath == "") {
							throw new Error("ERROR_EMPTY_PATH");
						}
						
						// Set the default value of overwriteExisting if it was not provided
						if (overwriteExisting === undefined) {
							overwriteExisting = false;
						}
						
						// Separate the path into a list of subdirectories and the attachment filename,
						// and initialize the required file objects
						var targetFile = Components.classes["@mozilla.org/file/local;1"].
							createInstance(Components.interfaces.nsILocalFile);
						targetFile.initWithFile(exportDir);
						for each(var dir in attachPath.split("/")) targetFile.append(dir);
						
						// First, check that we have not gone lower than exportDir in the hierarchy
						var parent = targetFile, inExportFileDirectory;
						while((parent = parent.parent)) {
							if(exportDir.equals(parent)) {
								inExportFileDirectory = true;
								break;
							}
						}
						
						if(!inExportFileDirectory) {
							// NOTE(review): "hirarchy" typo below is in a runtime error string;
							// left byte-identical here, flagged for a separate fix.
							throw new Error("Invalid path; attachment cannot be placed above export "+
								"directory in the file hirarchy");
						}
						
						// Create intermediate directories if they don't exist
						parent = targetFile;
						while((parent = parent.parent) && !parent.exists()) {
							parent.create(Components.interfaces.nsIFile.DIRECTORY_TYPE, 0700);
						}
						
						// Delete any existing file if overwriteExisting is set, or throw an exception
						// if it is not
						if(targetFile.exists()) {
							if(overwriteExisting) {
								targetFile.remove(false);
							} else {
								throw new Error("ERROR_FILE_EXISTS " + targetFile.leafName);
							}
						}
						
						var directory = targetFile.parent;
						
						// The only attachments that can have multiple supporting files are imported
						// attachments of mime type text/html (specified in Attachments.getNumFiles())
						if(attachment.attachmentMIMEType == "text/html"
								&& linkMode != Zotero.Attachments.LINK_MODE_LINKED_FILE
								&& Zotero.Attachments.getNumFiles(attachment) > 1) {
							// Attachment is a snapshot with supporting files. Check if any of the
							// supporting files would cause a name conflict, and build a list of transfers
							// that should be performed
							var copySrcs = [];
							var files = attachment.getFile().parent.directoryEntries;
							while (files.hasMoreElements()) {
								// NOTE(review): `file` is assigned without var/let — implicit global
								// in non-strict mode; presumably should be declared locally.
								file = files.getNext();
								file.QueryInterface(Components.interfaces.nsIFile);
								
								// Ignore the main attachment file (has already been checked for name conflict)
								if(attachFile.equals(file)) {
									continue;
								}
								
								// Remove any existing files in the target destination if overwriteExisting
								// is set, or throw an exception if it is not
								var targetSupportFile = targetFile.parent.clone();
								targetSupportFile.append(file.leafName);
								if(targetSupportFile.exists()) {
									if(overwriteExisting) {
										targetSupportFile.remove(false);
									} else {
										throw new Error("ERROR_FILE_EXISTS " + targetSupportFile.leafName);
									}
								}
								copySrcs.push(file.clone());
							}
							
							// No conflicts were detected or all conflicts were resolved, perform the copying
							attachFile.copyTo(directory, targetFile.leafName);
							for(var i = 0; i < copySrcs.length; i++) {
								copySrcs[i].copyTo(directory, copySrcs[i].leafName);
							}
						} else {
							// Attachment is a single file
							// Copy the file to the specified location
							attachFile.copyTo(directory, targetFile.leafName);
						}
						
						attachmentArray.path = targetFile.path;
					};
				}
			}
		}
		
		return attachmentArray;
	},
	
	/**
	 * Retrieves the next available item
	 */
	"nextItem":function() {
		while(this._itemsLeft.length != 0) {
			var returnItem = this._itemsLeft.shift();
			// export file data for single files
			if(returnItem.isAttachment()) {
				// an independent attachment
				var returnItemArray = this._attachmentToArray(returnItem);
				if(returnItemArray) return returnItemArray;
			} else {
				var returnItemArray = Zotero.Utilities.Internal.itemToExportFormat(returnItem, this.legacy);
				
				// get attachments, although only urls will be passed if exportFileData is off
				returnItemArray.attachments = [];
				var attachments = returnItem.getAttachments();
				for each(var attachmentID in attachments) {
					var attachment = Zotero.Items.get(attachmentID);
var attachmentInfo = this._attachmentToArray(attachment); if(attachmentInfo) { returnItemArray.attachments.push(attachmentInfo); } } return returnItemArray; } } return false; }, "nextCollection":function() { if(!this._collectionsLeft || this._collectionsLeft.length == 0) return false; var returnItem = this._collectionsLeft.shift(); var obj = returnItem.serialize(true); obj.id = obj.primary.collectionID; obj.name = obj.fields.name; return obj; } } Zotero.Translate.ItemGetter.prototype.__defineGetter__("numItemsRemaining", function() { return this._itemsLeft.length });
Sort items for translator In response to https://forums.zotero.org/discussion/60375?page=1#Item_6
chrome/content/zotero/xpcom/translation/translate_item.js
Sort items for translator
<ide><path>hrome/content/zotero/xpcom/translation/translate_item.js <ide> Zotero.Translate.ItemGetter.prototype = { <ide> "setItems":function(items) { <ide> this._itemsLeft = items; <add> this._itemsLeft.sort(function(a, b) { return a.id - b.id; }); <ide> this.numItems = this._itemsLeft.length; <ide> }, <ide> <ide> } <ide> } <ide> <add> this._itemsLeft.sort(function(a, b) { return a.id - b.id; }); <ide> this.numItems = this._itemsLeft.length; <ide> }, <ide> <ide> this._collectionsLeft = Zotero.getCollections(null, true, libraryID); <ide> } <ide> <add> this._itemsLeft.sort(function(a, b) { return a.id - b.id; }); <ide> this.numItems = this._itemsLeft.length; <ide> }, <ide>
Java
epl-1.0
b555ec3a09714e0601ba2caeace96957aab42638
0
violinlakshmi/opendaylight,mandeepdhami/controller,Sushma7785/OpenDayLight-Load-Balancer,aryantaheri/controller,522986491/controller,Sushma7785/OpenDayLight-Load-Balancer,aryantaheri/monitoring-controller,aryantaheri/controller,my76128/controller,Johnson-Chou/test,tx1103mark/controller,tx1103mark/controller,inocybe/odl-controller,my76128/controller,aryantaheri/monitoring-controller,aryantaheri/controller,violinlakshmi/opendaylight,tx1103mark/controller,opendaylight/controller,Johnson-Chou/test,aryantaheri/monitoring-controller,aryantaheri/monitoring-controller,my76128/controller,mandeepdhami/controller,inocybe/odl-controller,522986491/controller,my76128/controller,violinlakshmi/opendaylight,mandeepdhami/controller,mandeepdhami/controller,tx1103mark/controller
/* * Copyright IBM Corporation, 2013. All rights reserved. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 which accompanies this distribution, * and is available at http://www.eclipse.org/legal/epl-v10.html */ package org.opendaylight.controller.networkconfig.neutron.northbound; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.codehaus.enunciate.jaxrs.ResponseCode; import org.codehaus.enunciate.jaxrs.StatusCodes; import org.opendaylight.controller.networkconfig.neutron.INeutronNetworkCRUD; import org.opendaylight.controller.networkconfig.neutron.INeutronPortAware; import org.opendaylight.controller.networkconfig.neutron.INeutronPortCRUD; import org.opendaylight.controller.networkconfig.neutron.INeutronSubnetCRUD; import org.opendaylight.controller.networkconfig.neutron.NeutronCRUDInterfaces; import org.opendaylight.controller.networkconfig.neutron.NeutronPort; import org.opendaylight.controller.networkconfig.neutron.NeutronSubnet; import org.opendaylight.controller.networkconfig.neutron.Neutron_IPs; import org.opendaylight.controller.northbound.commons.RestMessages; import org.opendaylight.controller.northbound.commons.exception.ServiceUnavailableException; import org.opendaylight.controller.sal.utils.ServiceHelper; /** * Open DOVE Northbound REST APIs.<br> * This class provides REST APIs for managing the open DOVE * * <br> * <br> * Authentication scheme : <b>HTTP Basic</b><br> * Authentication realm : <b>opendaylight</b><br> * Transport : <b>HTTP and HTTPS</b><br> * <br> * HTTPS Authentication is disabled by default. 
Administrator can enable it in * tomcat-server.xml after adding a proper keystore / SSL certificate from a * trusted authority.<br> * More info : * http://tomcat.apache.org/tomcat-7.0-doc/ssl-howto.html#Configuration * */ @Path("/ports") public class NeutronPortsNorthbound { private NeutronPort extractFields(NeutronPort o, List<String> fields) { return o.extractFields(fields); } /** * Returns a list of all Ports */ @GET @Produces({ MediaType.APPLICATION_JSON }) //@TypeHint(OpenStackPorts.class) @StatusCodes({ @ResponseCode(code = 200, condition = "Operation successful"), @ResponseCode(code = 401, condition = "Unauthorized"), @ResponseCode(code = 501, condition = "Not Implemented") }) public Response listPorts( // return fields @QueryParam("fields") List<String> fields, // note: openstack isn't clear about filtering on lists, so we aren't handling them @QueryParam("id") String queryID, @QueryParam("network_id") String queryNetworkID, @QueryParam("name") String queryName, @QueryParam("admin_state_up") String queryAdminStateUp, @QueryParam("status") String queryStatus, @QueryParam("mac_address") String queryMACAddress, @QueryParam("device_id") String queryDeviceID, @QueryParam("device_owner") String queryDeviceOwner, @QueryParam("tenant_id") String queryTenantID, // pagination @QueryParam("limit") String limit, @QueryParam("marker") String marker, @QueryParam("page_reverse") String pageReverse // sorting not supported ) { INeutronPortCRUD portInterface = NeutronCRUDInterfaces.getINeutronPortCRUD(this); if (portInterface == null) { throw new ServiceUnavailableException("Port CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } List<NeutronPort> allPorts = portInterface.getAllPorts(); List<NeutronPort> ans = new ArrayList<NeutronPort>(); Iterator<NeutronPort> i = allPorts.iterator(); while (i.hasNext()) { NeutronPort oSS = i.next(); if ((queryID == null || queryID.equals(oSS.getID())) && (queryNetworkID == null || 
queryNetworkID.equals(oSS.getNetworkUUID())) && (queryName == null || queryName.equals(oSS.getName())) && (queryAdminStateUp == null || queryAdminStateUp.equals(oSS.getAdminStateUp())) && (queryStatus == null || queryStatus.equals(oSS.getStatus())) && (queryMACAddress == null || queryMACAddress.equals(oSS.getMacAddress())) && (queryDeviceID == null || queryDeviceID.equals(oSS.getDeviceID())) && (queryDeviceOwner == null || queryDeviceOwner.equals(oSS.getDeviceOwner())) && (queryTenantID == null || queryTenantID.equals(oSS.getTenantID()))) { if (fields.size() > 0) { ans.add(extractFields(oSS,fields)); } else { ans.add(oSS); } } } //TODO: apply pagination to results return Response.status(200).entity( new NeutronPortRequest(ans)).build(); } /** * Returns a specific Port */ @Path("{portUUID}") @GET @Produces({ MediaType.APPLICATION_JSON }) //@TypeHint(OpenStackPorts.class) @StatusCodes({ @ResponseCode(code = 200, condition = "Operation successful"), @ResponseCode(code = 401, condition = "Unauthorized"), @ResponseCode(code = 404, condition = "Not Found"), @ResponseCode(code = 501, condition = "Not Implemented") }) public Response showPort( @PathParam("portUUID") String portUUID, // return fields @QueryParam("fields") List<String> fields ) { INeutronPortCRUD portInterface = NeutronCRUDInterfaces.getINeutronPortCRUD(this); if (portInterface == null) { throw new ServiceUnavailableException("Port CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } if (!portInterface.portExists(portUUID)) { return Response.status(404).build(); } if (fields.size() > 0) { NeutronPort ans = portInterface.getPort(portUUID); return Response.status(200).entity( new NeutronPortRequest(extractFields(ans, fields))).build(); } else { return Response.status(200).entity( new NeutronPortRequest(portInterface.getPort(portUUID))).build(); } } /** * Creates new Ports */ @POST @Produces({ MediaType.APPLICATION_JSON }) @Consumes({ MediaType.APPLICATION_JSON }) //@TypeHint(OpenStackPorts.class) 
@StatusCodes({ @ResponseCode(code = 201, condition = "Created"), @ResponseCode(code = 400, condition = "Bad Request"), @ResponseCode(code = 401, condition = "Unauthorized"), @ResponseCode(code = 403, condition = "Forbidden"), @ResponseCode(code = 404, condition = "Not Found"), @ResponseCode(code = 409, condition = "Conflict"), @ResponseCode(code = 501, condition = "Not Implemented"), @ResponseCode(code = 503, condition = "MAC generation failure") }) public Response createPorts(final NeutronPortRequest input) { INeutronPortCRUD portInterface = NeutronCRUDInterfaces.getINeutronPortCRUD(this); if (portInterface == null) { throw new ServiceUnavailableException("Port CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } INeutronNetworkCRUD networkInterface = NeutronCRUDInterfaces.getINeutronNetworkCRUD( this); if (networkInterface == null) { throw new ServiceUnavailableException("Network CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } INeutronSubnetCRUD subnetInterface = NeutronCRUDInterfaces.getINeutronSubnetCRUD( this); if (subnetInterface == null) { throw new ServiceUnavailableException("Subnet CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } if (input.isSingleton()) { NeutronPort singleton = input.getSingleton(); /* * the port must be part of an existing network, must not already exist, * have a valid MAC and the MAC not be in use */ if (singleton.getNetworkUUID() == null) { return Response.status(400).build(); } if (portInterface.portExists(singleton.getID())) { return Response.status(400).build(); } if (!networkInterface.networkExists(singleton.getNetworkUUID())) { return Response.status(404).build(); } if (singleton.getMacAddress() == null || !singleton.getMacAddress().matches("^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$")) { return Response.status(400).build(); } if (portInterface.macInUse(singleton.getMacAddress())) { return Response.status(409).build(); } Object[] instances = 
ServiceHelper.getGlobalInstances(INeutronPortAware.class, this, null); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; int status = service.canCreatePort(singleton); if (status < 200 || status > 299) { return Response.status(status).build(); } } } /* * if fixed IPs are specified, each one has to have an existing subnet ID * that is in the same scoping network as the port. In addition, if an IP * address is specified it has to be a valid address for the subnet and not * already in use */ List<Neutron_IPs> fixedIPs = singleton.getFixedIPs(); if (fixedIPs != null && fixedIPs.size() > 0) { Iterator<Neutron_IPs> fixedIPIterator = fixedIPs.iterator(); while (fixedIPIterator.hasNext()) { Neutron_IPs ip = fixedIPIterator.next(); if (ip.getSubnetUUID() == null) { return Response.status(400).build(); } if (!subnetInterface.subnetExists(ip.getSubnetUUID())) { return Response.status(400).build(); } NeutronSubnet subnet = subnetInterface.getSubnet(ip.getSubnetUUID()); if (!singleton.getNetworkUUID().equalsIgnoreCase(subnet.getNetworkUUID())) { return Response.status(400).build(); } if (ip.getIpAddress() != null) { if (!subnet.isValidIP(ip.getIpAddress())) { return Response.status(400).build(); } if (subnet.isIPInUse(ip.getIpAddress())) { return Response.status(409).build(); } } } } // add the port to the cache portInterface.addPort(singleton); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; service.neutronPortCreated(singleton); } } } else { List<NeutronPort> bulk = input.getBulk(); Iterator<NeutronPort> i = bulk.iterator(); HashMap<String, NeutronPort> testMap = new HashMap<String, NeutronPort>(); Object[] instances = ServiceHelper.getGlobalInstances(INeutronPortAware.class, this, null); while (i.hasNext()) { NeutronPort test = i.next(); /* * the port must be part of an existing network, must not already exist, * have a valid MAC and the 
MAC not be in use. Further the bulk request * can't already contain a new port with the same UUID */ if (portInterface.portExists(test.getID())) { return Response.status(400).build(); } if (testMap.containsKey(test.getID())) { return Response.status(400).build(); } for (NeutronPort check : testMap.values()) { if (test.getMacAddress().equalsIgnoreCase(check.getMacAddress())) { return Response.status(409).build(); } for (Neutron_IPs test_fixedIP : test.getFixedIPs()) { for (Neutron_IPs check_fixedIP : check.getFixedIPs()) { if (test_fixedIP.getIpAddress().equals(check_fixedIP.getIpAddress())) { return Response.status(409).build(); } } } } testMap.put(test.getID(), test); if (!networkInterface.networkExists(test.getNetworkUUID())) { return Response.status(404).build(); } if (!test.getMacAddress().matches("^([0-9A-F]{2}[:-]){5}([0-9A-F]{2})$")) { return Response.status(400).build(); } if (portInterface.macInUse(test.getMacAddress())) { return Response.status(409).build(); } if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; int status = service.canCreatePort(test); if (status < 200 || status > 299) { return Response.status(status).build(); } } } /* * if fixed IPs are specified, each one has to have an existing subnet ID * that is in the same scoping network as the port. 
In addition, if an IP * address is specified it has to be a valid address for the subnet and not * already in use (or be the gateway IP address of the subnet) */ List<Neutron_IPs> fixedIPs = test.getFixedIPs(); if (fixedIPs != null && fixedIPs.size() > 0) { Iterator<Neutron_IPs> fixedIPIterator = fixedIPs.iterator(); while (fixedIPIterator.hasNext()) { Neutron_IPs ip = fixedIPIterator.next(); if (ip.getSubnetUUID() == null) { return Response.status(400).build(); } if (!subnetInterface.subnetExists(ip.getSubnetUUID())) { return Response.status(400).build(); } NeutronSubnet subnet = subnetInterface.getSubnet(ip.getSubnetUUID()); if (!test.getNetworkUUID().equalsIgnoreCase(subnet.getNetworkUUID())) { return Response.status(400).build(); } if (ip.getIpAddress() != null) { if (!subnet.isValidIP(ip.getIpAddress())) { return Response.status(400).build(); } //TODO: need to add consideration for a fixed IP being assigned the same address as a allocated IP in the //same bulk create if (subnet.isIPInUse(ip.getIpAddress())) { return Response.status(409).build(); } } } } } //once everything has passed, then we can add to the cache i = bulk.iterator(); while (i.hasNext()) { NeutronPort test = i.next(); portInterface.addPort(test); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; service.neutronPortCreated(test); } } } } return Response.status(201).entity(input).build(); } /** * Updates a Port */ @Path("{portUUID}") @PUT @Produces({ MediaType.APPLICATION_JSON }) @Consumes({ MediaType.APPLICATION_JSON }) //@TypeHint(OpenStackPorts.class) @StatusCodes({ @ResponseCode(code = 200, condition = "Operation successful"), @ResponseCode(code = 400, condition = "Bad Request"), @ResponseCode(code = 401, condition = "Unauthorized"), @ResponseCode(code = 403, condition = "Forbidden"), @ResponseCode(code = 404, condition = "Not Found"), @ResponseCode(code = 409, condition = "Conflict"), @ResponseCode(code = 501, condition = 
"Not Implemented") }) public Response updatePort( @PathParam("portUUID") String portUUID, NeutronPortRequest input ) { INeutronPortCRUD portInterface = NeutronCRUDInterfaces.getINeutronPortCRUD(this); if (portInterface == null) { throw new ServiceUnavailableException("Port CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } INeutronSubnetCRUD subnetInterface = NeutronCRUDInterfaces.getINeutronSubnetCRUD( this); if (subnetInterface == null) { throw new ServiceUnavailableException("Subnet CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } // port has to exist and only a single delta is supported if (!portInterface.portExists(portUUID)) { return Response.status(404).build(); } NeutronPort target = portInterface.getPort(portUUID); if (!input.isSingleton()) { return Response.status(400).build(); } NeutronPort singleton = input.getSingleton(); NeutronPort original = portInterface.getPort(portUUID); // deltas restricted by Neutron if (singleton.getID() != null || singleton.getTenantID() != null || singleton.getStatus() != null) { return Response.status(400).build(); } Object[] instances = ServiceHelper.getGlobalInstances(INeutronPortAware.class, this, null); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; int status = service.canUpdatePort(singleton, original); if (status < 200 || status > 299) { return Response.status(status).build(); } } } // Verify the new fixed ips are valid List<Neutron_IPs> fixedIPs = singleton.getFixedIPs(); if (fixedIPs != null && fixedIPs.size() > 0) { Iterator<Neutron_IPs> fixedIPIterator = fixedIPs.iterator(); while (fixedIPIterator.hasNext()) { Neutron_IPs ip = fixedIPIterator.next(); if (ip.getSubnetUUID() == null) { return Response.status(400).build(); } if (!subnetInterface.subnetExists(ip.getSubnetUUID())) { return Response.status(400).build(); } NeutronSubnet subnet = subnetInterface.getSubnet(ip.getSubnetUUID()); if 
(!target.getNetworkUUID().equalsIgnoreCase(subnet.getNetworkUUID())) { return Response.status(400).build(); } if (ip.getIpAddress() != null) { if (!subnet.isValidIP(ip.getIpAddress())) { return Response.status(400).build(); } if (subnet.isIPInUse(ip.getIpAddress())) { return Response.status(409).build(); } } } } // TODO: Support change of security groups // update the port and return the modified object portInterface.updatePort(portUUID, singleton); NeutronPort updatedPort = portInterface.getPort(portUUID); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; service.neutronPortUpdated(updatedPort); } } return Response.status(200).entity( new NeutronPortRequest(updatedPort)).build(); } /** * Deletes a Port */ @Path("{portUUID}") @DELETE @StatusCodes({ @ResponseCode(code = 204, condition = "No Content"), @ResponseCode(code = 401, condition = "Unauthorized"), @ResponseCode(code = 403, condition = "Forbidden"), @ResponseCode(code = 404, condition = "Not Found"), @ResponseCode(code = 501, condition = "Not Implemented") }) public Response deletePort( @PathParam("portUUID") String portUUID) { INeutronPortCRUD portInterface = NeutronCRUDInterfaces.getINeutronPortCRUD(this); if (portInterface == null) { throw new ServiceUnavailableException("Port CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } // port has to exist and not be owned by anyone. 
then it can be removed from the cache if (!portInterface.portExists(portUUID)) { return Response.status(404).build(); } NeutronPort port = portInterface.getPort(portUUID); if (port.getDeviceID() != null || port.getDeviceOwner() != null) { Response.status(403).build(); } NeutronPort singleton = portInterface.getPort(portUUID); Object[] instances = ServiceHelper.getGlobalInstances(INeutronPortAware.class, this, null); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; int status = service.canDeletePort(singleton); if (status < 200 || status > 299) { return Response.status(status).build(); } } } portInterface.removePort(portUUID); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; service.neutronPortDeleted(singleton); } } return Response.status(204).build(); } }
opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronPortsNorthbound.java
/* * Copyright IBM Corporation, 2013. All rights reserved. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 which accompanies this distribution, * and is available at http://www.eclipse.org/legal/epl-v10.html */ package org.opendaylight.controller.networkconfig.neutron.northbound; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.codehaus.enunciate.jaxrs.ResponseCode; import org.codehaus.enunciate.jaxrs.StatusCodes; import org.opendaylight.controller.networkconfig.neutron.INeutronNetworkCRUD; import org.opendaylight.controller.networkconfig.neutron.INeutronPortAware; import org.opendaylight.controller.networkconfig.neutron.INeutronPortCRUD; import org.opendaylight.controller.networkconfig.neutron.INeutronSubnetAware; import org.opendaylight.controller.networkconfig.neutron.INeutronSubnetCRUD; import org.opendaylight.controller.networkconfig.neutron.NeutronCRUDInterfaces; import org.opendaylight.controller.networkconfig.neutron.NeutronPort; import org.opendaylight.controller.networkconfig.neutron.NeutronSubnet; import org.opendaylight.controller.networkconfig.neutron.Neutron_IPs; import org.opendaylight.controller.northbound.commons.RestMessages; import org.opendaylight.controller.northbound.commons.exception.ServiceUnavailableException; import org.opendaylight.controller.sal.utils.ServiceHelper; /** * Open DOVE Northbound REST APIs.<br> * This class provides REST APIs for managing the open DOVE * * <br> * <br> * Authentication scheme : <b>HTTP Basic</b><br> * Authentication realm : <b>opendaylight</b><br> * Transport : 
<b>HTTP and HTTPS</b><br> * <br> * HTTPS Authentication is disabled by default. Administrator can enable it in * tomcat-server.xml after adding a proper keystore / SSL certificate from a * trusted authority.<br> * More info : * http://tomcat.apache.org/tomcat-7.0-doc/ssl-howto.html#Configuration * */ @Path("/ports") public class NeutronPortsNorthbound { private NeutronPort extractFields(NeutronPort o, List<String> fields) { return o.extractFields(fields); } /** * Returns a list of all Ports */ @GET @Produces({ MediaType.APPLICATION_JSON }) //@TypeHint(OpenStackPorts.class) @StatusCodes({ @ResponseCode(code = 200, condition = "Operation successful"), @ResponseCode(code = 401, condition = "Unauthorized"), @ResponseCode(code = 501, condition = "Not Implemented") }) public Response listPorts( // return fields @QueryParam("fields") List<String> fields, // note: openstack isn't clear about filtering on lists, so we aren't handling them @QueryParam("id") String queryID, @QueryParam("network_id") String queryNetworkID, @QueryParam("name") String queryName, @QueryParam("admin_state_up") String queryAdminStateUp, @QueryParam("status") String queryStatus, @QueryParam("mac_address") String queryMACAddress, @QueryParam("device_id") String queryDeviceID, @QueryParam("device_owner") String queryDeviceOwner, @QueryParam("tenant_id") String queryTenantID, // pagination @QueryParam("limit") String limit, @QueryParam("marker") String marker, @QueryParam("page_reverse") String pageReverse // sorting not supported ) { INeutronPortCRUD portInterface = NeutronCRUDInterfaces.getINeutronPortCRUD(this); if (portInterface == null) { throw new ServiceUnavailableException("Port CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } List<NeutronPort> allPorts = portInterface.getAllPorts(); List<NeutronPort> ans = new ArrayList<NeutronPort>(); Iterator<NeutronPort> i = allPorts.iterator(); while (i.hasNext()) { NeutronPort oSS = i.next(); if ((queryID == null || 
queryID.equals(oSS.getID())) && (queryNetworkID == null || queryNetworkID.equals(oSS.getNetworkUUID())) && (queryName == null || queryName.equals(oSS.getName())) && (queryAdminStateUp == null || queryAdminStateUp.equals(oSS.getAdminStateUp())) && (queryStatus == null || queryStatus.equals(oSS.getStatus())) && (queryMACAddress == null || queryMACAddress.equals(oSS.getMacAddress())) && (queryDeviceID == null || queryDeviceID.equals(oSS.getDeviceID())) && (queryDeviceOwner == null || queryDeviceOwner.equals(oSS.getDeviceOwner())) && (queryTenantID == null || queryTenantID.equals(oSS.getTenantID()))) { if (fields.size() > 0) { ans.add(extractFields(oSS,fields)); } else { ans.add(oSS); } } } //TODO: apply pagination to results return Response.status(200).entity( new NeutronPortRequest(ans)).build(); } /** * Returns a specific Port */ @Path("{portUUID}") @GET @Produces({ MediaType.APPLICATION_JSON }) //@TypeHint(OpenStackPorts.class) @StatusCodes({ @ResponseCode(code = 200, condition = "Operation successful"), @ResponseCode(code = 401, condition = "Unauthorized"), @ResponseCode(code = 404, condition = "Not Found"), @ResponseCode(code = 501, condition = "Not Implemented") }) public Response showPort( @PathParam("portUUID") String portUUID, // return fields @QueryParam("fields") List<String> fields ) { INeutronPortCRUD portInterface = NeutronCRUDInterfaces.getINeutronPortCRUD(this); if (portInterface == null) { throw new ServiceUnavailableException("Port CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } if (!portInterface.portExists(portUUID)) { return Response.status(404).build(); } if (fields.size() > 0) { NeutronPort ans = portInterface.getPort(portUUID); return Response.status(200).entity( new NeutronPortRequest(extractFields(ans, fields))).build(); } else { return Response.status(200).entity( new NeutronPortRequest(portInterface.getPort(portUUID))).build(); } } /** * Creates new Ports */ @POST @Produces({ MediaType.APPLICATION_JSON }) @Consumes({ 
MediaType.APPLICATION_JSON }) //@TypeHint(OpenStackPorts.class) @StatusCodes({ @ResponseCode(code = 201, condition = "Created"), @ResponseCode(code = 400, condition = "Bad Request"), @ResponseCode(code = 401, condition = "Unauthorized"), @ResponseCode(code = 403, condition = "Forbidden"), @ResponseCode(code = 404, condition = "Not Found"), @ResponseCode(code = 409, condition = "Conflict"), @ResponseCode(code = 501, condition = "Not Implemented"), @ResponseCode(code = 503, condition = "MAC generation failure") }) public Response createPorts(final NeutronPortRequest input) { INeutronPortCRUD portInterface = NeutronCRUDInterfaces.getINeutronPortCRUD(this); if (portInterface == null) { throw new ServiceUnavailableException("Port CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } INeutronNetworkCRUD networkInterface = NeutronCRUDInterfaces.getINeutronNetworkCRUD( this); if (networkInterface == null) { throw new ServiceUnavailableException("Network CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } INeutronSubnetCRUD subnetInterface = NeutronCRUDInterfaces.getINeutronSubnetCRUD( this); if (subnetInterface == null) { throw new ServiceUnavailableException("Subnet CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } if (input.isSingleton()) { NeutronPort singleton = input.getSingleton(); /* * the port must be part of an existing network, must not already exist, * have a valid MAC and the MAC not be in use */ if (singleton.getNetworkUUID() == null) { return Response.status(400).build(); } if (portInterface.portExists(singleton.getID())) { return Response.status(400).build(); } if (!networkInterface.networkExists(singleton.getNetworkUUID())) { return Response.status(404).build(); } if (singleton.getMacAddress() == null || !singleton.getMacAddress().matches("^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$")) { return Response.status(400).build(); } if (portInterface.macInUse(singleton.getMacAddress())) { return Response.status(409).build(); 
} Object[] instances = ServiceHelper.getGlobalInstances(INeutronPortAware.class, this, null); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; int status = service.canCreatePort(singleton); if (status < 200 || status > 299) { return Response.status(status).build(); } } } /* * if fixed IPs are specified, each one has to have an existing subnet ID * that is in the same scoping network as the port. In addition, if an IP * address is specified it has to be a valid address for the subnet and not * already in use */ List<Neutron_IPs> fixedIPs = singleton.getFixedIPs(); if (fixedIPs != null && fixedIPs.size() > 0) { Iterator<Neutron_IPs> fixedIPIterator = fixedIPs.iterator(); while (fixedIPIterator.hasNext()) { Neutron_IPs ip = fixedIPIterator.next(); if (ip.getSubnetUUID() == null) { return Response.status(400).build(); } if (!subnetInterface.subnetExists(ip.getSubnetUUID())) { return Response.status(400).build(); } NeutronSubnet subnet = subnetInterface.getSubnet(ip.getSubnetUUID()); if (!singleton.getNetworkUUID().equalsIgnoreCase(subnet.getNetworkUUID())) { return Response.status(400).build(); } if (ip.getIpAddress() != null) { if (!subnet.isValidIP(ip.getIpAddress())) { return Response.status(400).build(); } if (subnet.isIPInUse(ip.getIpAddress())) { return Response.status(409).build(); } } } } // add the port to the cache portInterface.addPort(singleton); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; service.neutronPortCreated(singleton); } } } else { List<NeutronPort> bulk = input.getBulk(); Iterator<NeutronPort> i = bulk.iterator(); HashMap<String, NeutronPort> testMap = new HashMap<String, NeutronPort>(); Object[] instances = ServiceHelper.getGlobalInstances(INeutronSubnetAware.class, this, null); while (i.hasNext()) { NeutronPort test = i.next(); /* * the port must be part of an existing network, must not already exist, * 
have a valid MAC and the MAC not be in use. Further the bulk request * can't already contain a new port with the same UUID */ if (portInterface.portExists(test.getID())) { return Response.status(400).build(); } if (testMap.containsKey(test.getID())) { return Response.status(400).build(); } for (NeutronPort check : testMap.values()) { if (test.getMacAddress().equalsIgnoreCase(check.getMacAddress())) { return Response.status(409).build(); } for (Neutron_IPs test_fixedIP : test.getFixedIPs()) { for (Neutron_IPs check_fixedIP : check.getFixedIPs()) { if (test_fixedIP.getIpAddress().equals(check_fixedIP.getIpAddress())) { return Response.status(409).build(); } } } } testMap.put(test.getID(), test); if (!networkInterface.networkExists(test.getNetworkUUID())) { return Response.status(404).build(); } if (!test.getMacAddress().matches("^([0-9A-F]{2}[:-]){5}([0-9A-F]{2})$")) { return Response.status(400).build(); } if (portInterface.macInUse(test.getMacAddress())) { return Response.status(409).build(); } if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; int status = service.canCreatePort(test); if (status < 200 || status > 299) { return Response.status(status).build(); } } } /* * if fixed IPs are specified, each one has to have an existing subnet ID * that is in the same scoping network as the port. 
In addition, if an IP * address is specified it has to be a valid address for the subnet and not * already in use (or be the gateway IP address of the subnet) */ List<Neutron_IPs> fixedIPs = test.getFixedIPs(); if (fixedIPs != null && fixedIPs.size() > 0) { Iterator<Neutron_IPs> fixedIPIterator = fixedIPs.iterator(); while (fixedIPIterator.hasNext()) { Neutron_IPs ip = fixedIPIterator.next(); if (ip.getSubnetUUID() == null) { return Response.status(400).build(); } if (!subnetInterface.subnetExists(ip.getSubnetUUID())) { return Response.status(400).build(); } NeutronSubnet subnet = subnetInterface.getSubnet(ip.getSubnetUUID()); if (!test.getNetworkUUID().equalsIgnoreCase(subnet.getNetworkUUID())) { return Response.status(400).build(); } if (ip.getIpAddress() != null) { if (!subnet.isValidIP(ip.getIpAddress())) { return Response.status(400).build(); } //TODO: need to add consideration for a fixed IP being assigned the same address as a allocated IP in the //same bulk create if (subnet.isIPInUse(ip.getIpAddress())) { return Response.status(409).build(); } } } } } //once everything has passed, then we can add to the cache i = bulk.iterator(); while (i.hasNext()) { NeutronPort test = i.next(); portInterface.addPort(test); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; service.neutronPortCreated(test); } } } } return Response.status(201).entity(input).build(); } /** * Updates a Port */ @Path("{portUUID}") @PUT @Produces({ MediaType.APPLICATION_JSON }) @Consumes({ MediaType.APPLICATION_JSON }) //@TypeHint(OpenStackPorts.class) @StatusCodes({ @ResponseCode(code = 200, condition = "Operation successful"), @ResponseCode(code = 400, condition = "Bad Request"), @ResponseCode(code = 401, condition = "Unauthorized"), @ResponseCode(code = 403, condition = "Forbidden"), @ResponseCode(code = 404, condition = "Not Found"), @ResponseCode(code = 409, condition = "Conflict"), @ResponseCode(code = 501, condition = 
"Not Implemented") }) public Response updatePort( @PathParam("portUUID") String portUUID, NeutronPortRequest input ) { INeutronPortCRUD portInterface = NeutronCRUDInterfaces.getINeutronPortCRUD(this); if (portInterface == null) { throw new ServiceUnavailableException("Port CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } INeutronSubnetCRUD subnetInterface = NeutronCRUDInterfaces.getINeutronSubnetCRUD( this); if (subnetInterface == null) { throw new ServiceUnavailableException("Subnet CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } // port has to exist and only a single delta is supported if (!portInterface.portExists(portUUID)) { return Response.status(404).build(); } NeutronPort target = portInterface.getPort(portUUID); if (!input.isSingleton()) { return Response.status(400).build(); } NeutronPort singleton = input.getSingleton(); NeutronPort original = portInterface.getPort(portUUID); // deltas restricted by Neutron if (singleton.getID() != null || singleton.getTenantID() != null || singleton.getStatus() != null) { return Response.status(400).build(); } Object[] instances = ServiceHelper.getGlobalInstances(INeutronPortAware.class, this, null); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; int status = service.canUpdatePort(singleton, original); if (status < 200 || status > 299) { return Response.status(status).build(); } } } // Verify the new fixed ips are valid List<Neutron_IPs> fixedIPs = singleton.getFixedIPs(); if (fixedIPs != null && fixedIPs.size() > 0) { Iterator<Neutron_IPs> fixedIPIterator = fixedIPs.iterator(); while (fixedIPIterator.hasNext()) { Neutron_IPs ip = fixedIPIterator.next(); if (ip.getSubnetUUID() == null) { return Response.status(400).build(); } if (!subnetInterface.subnetExists(ip.getSubnetUUID())) { return Response.status(400).build(); } NeutronSubnet subnet = subnetInterface.getSubnet(ip.getSubnetUUID()); if 
(!target.getNetworkUUID().equalsIgnoreCase(subnet.getNetworkUUID())) { return Response.status(400).build(); } if (ip.getIpAddress() != null) { if (!subnet.isValidIP(ip.getIpAddress())) { return Response.status(400).build(); } if (subnet.isIPInUse(ip.getIpAddress())) { return Response.status(409).build(); } } } } // TODO: Support change of security groups // update the port and return the modified object portInterface.updatePort(portUUID, singleton); NeutronPort updatedPort = portInterface.getPort(portUUID); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; service.neutronPortUpdated(updatedPort); } } return Response.status(200).entity( new NeutronPortRequest(updatedPort)).build(); } /** * Deletes a Port */ @Path("{portUUID}") @DELETE @StatusCodes({ @ResponseCode(code = 204, condition = "No Content"), @ResponseCode(code = 401, condition = "Unauthorized"), @ResponseCode(code = 403, condition = "Forbidden"), @ResponseCode(code = 404, condition = "Not Found"), @ResponseCode(code = 501, condition = "Not Implemented") }) public Response deletePort( @PathParam("portUUID") String portUUID) { INeutronPortCRUD portInterface = NeutronCRUDInterfaces.getINeutronPortCRUD(this); if (portInterface == null) { throw new ServiceUnavailableException("Port CRUD Interface " + RestMessages.SERVICEUNAVAILABLE.toString()); } // port has to exist and not be owned by anyone. 
then it can be removed from the cache if (!portInterface.portExists(portUUID)) { return Response.status(404).build(); } NeutronPort port = portInterface.getPort(portUUID); if (port.getDeviceID() != null || port.getDeviceOwner() != null) { Response.status(403).build(); } NeutronPort singleton = portInterface.getPort(portUUID); Object[] instances = ServiceHelper.getGlobalInstances(INeutronPortAware.class, this, null); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; int status = service.canDeletePort(singleton); if (status < 200 || status > 299) { return Response.status(status).build(); } } } portInterface.removePort(portUUID); if (instances != null) { for (Object instance : instances) { INeutronPortAware service = (INeutronPortAware) instance; service.neutronPortDeleted(singleton); } } return Response.status(204).build(); } }
Invalid cast results in HTTP 500 error returned by the Neutron REST APIs. I think this is a copy+paste error where a INeutronSubnetAware is incorrectly casted to INeutronPortAware. Change-Id: I5009b790edbc60e5b143a520d306d984e14fea37 Signed-off-by: Madhu Venugopal <[email protected]>
opendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronPortsNorthbound.java
Invalid cast results in HTTP 500 error returned by the Neutron REST APIs.
<ide><path>pendaylight/northbound/networkconfiguration/neutron/src/main/java/org/opendaylight/controller/networkconfig/neutron/northbound/NeutronPortsNorthbound.java <ide> import org.opendaylight.controller.networkconfig.neutron.INeutronNetworkCRUD; <ide> import org.opendaylight.controller.networkconfig.neutron.INeutronPortAware; <ide> import org.opendaylight.controller.networkconfig.neutron.INeutronPortCRUD; <del>import org.opendaylight.controller.networkconfig.neutron.INeutronSubnetAware; <ide> import org.opendaylight.controller.networkconfig.neutron.INeutronSubnetCRUD; <ide> import org.opendaylight.controller.networkconfig.neutron.NeutronCRUDInterfaces; <ide> import org.opendaylight.controller.networkconfig.neutron.NeutronPort; <ide> List<NeutronPort> bulk = input.getBulk(); <ide> Iterator<NeutronPort> i = bulk.iterator(); <ide> HashMap<String, NeutronPort> testMap = new HashMap<String, NeutronPort>(); <del> Object[] instances = ServiceHelper.getGlobalInstances(INeutronSubnetAware.class, this, null); <add> Object[] instances = ServiceHelper.getGlobalInstances(INeutronPortAware.class, this, null); <ide> while (i.hasNext()) { <ide> NeutronPort test = i.next(); <ide>
Java
apache-2.0
error: pathspec 'src/main/java/top/quantic/sentry/service/util/FlappingDetector.java' did not match any file(s) known to git
5f72aeb8ce7c5f7f919ba352ef14725bebf82d58
1
quanticc/sentry,quanticc/sentry,quanticc/sentry
package top.quantic.sentry.service.util; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.function.Function; /** * Simple implementation of a flapping detection algorithm. * Ported from https://github.com/dclowd9901/FlappingDetection/blob/master/FlappingDetection.js */ public class FlappingDetector<T> { private static final Logger log = LoggerFactory.getLogger(FlappingDetector.class); private static final int DEFAULT_CAPACITY = 100; private final ArrayBlockingQueue<T> states; private final int capacity; private final Function<T, Boolean> healthCheck; private final List<FlappingListener> listenerList = new ArrayList<>(); private volatile State state = State.GOOD; private volatile double lastRatio = 0; private volatile double averageRatio = 1; private volatile double windowMargin = 0.02; private volatile double nominalRatio = 0.98; private volatile boolean settled = false; private volatile double recoveryThreshold; public FlappingDetector(Function<T, Boolean> healthCheck) { this(healthCheck, DEFAULT_CAPACITY); } public FlappingDetector(Function<T, Boolean> healthCheck, int maxStates) { this.healthCheck = healthCheck; this.capacity = maxStates; this.states = new ArrayBlockingQueue<>(capacity, true); } private double determineRatio() { return ((double) states.stream().mapToInt(s -> healthCheck.apply(s) ? 
1 : 0).sum()) / states.size(); } public void check(T value) { states.offer(value); double ratio = determineRatio(); double ratioLow = lastRatio - windowMargin; double ratioHigh = lastRatio + windowMargin; if (state == State.GOOD) { // Services is in good state if (ratio < nominalRatio) { // Service dipped below viability toBadState(); } else { if (states.size() == capacity) { // Two shifts for cleanup after a recovery state tryPoll(); tryPoll(); } } } else if (state == State.RECOVERY) { // Service is not 100%, but it appears to be recovering if (ratio > lastRatio) { tryPoll(); } // Service appears to have reached nominal status if (ratio >= nominalRatio) { averageRatio = nominalRatio; toGoodState(); } } else { // Service is in bad mode // test if the ratios have settled settled = ratio == lastRatio; if (settled) { averageRatio = ratio; } // test if ratio is upward bound if (ratio > lastRatio) { // A recovery threshold is the average between the average bad ratio and // the nominal ratio. 
If the service's ratio surpasses this amount, it's deemed // "in recovery" recoveryThreshold = (nominalRatio - averageRatio) / 2 + averageRatio; if (ratio > recoveryThreshold) { toRecoveryState(); } } if (ratio >= ratioLow && ratio <= ratioHigh) { // maintain window length if ratio doesn't change drastically tryPoll(); } } lastRatio = ratio; log.debug("{}", toString()); } private void tryPoll() { if (!states.isEmpty()) { states.poll(); } } private void toBadState() { state = State.BAD; notifyChange(); } private void toRecoveryState() { state = State.RECOVERY; notifyChange(); } private void toGoodState() { state = State.GOOD; notifyChange(); } public void addListener(FlappingListener listener) { listenerList.add(listener); } public void removeListener(FlappingListener listener) { listenerList.remove(listener); } public void removeAllListeners() { listenerList.clear(); } private void notifyChange() { listenerList.forEach(listener -> listener.onStateChange(new Snapshot(state, settled, lastRatio, averageRatio))); } public State getState() { return state; } public double getAverageRatio() { return averageRatio; } public double getLastRatio() { return lastRatio; } public double getRecoveryThreshold() { return recoveryThreshold; } public int getCapacity() { return capacity; } public double getWindowMargin() { return windowMargin; } public void setWindowMargin(double windowMargin) { this.windowMargin = windowMargin; } public double getNominalRatio() { return nominalRatio; } public void setNominalRatio(double nominalRatio) { this.nominalRatio = nominalRatio; } @Override public String toString() { return "FlappingDetector{" + "state=" + state + ", lastRatio=" + lastRatio + ", settled=" + settled + ", recoveryThreshold=" + recoveryThreshold + ", averageRatio=" + averageRatio + '}'; } public enum State { GOOD, BAD, RECOVERY } public interface FlappingListener { void onStateChange(Snapshot snapshot); } public static final class Snapshot { private final State state; private 
final boolean settled; private final double lastRatio; private final double averageRatio; private Snapshot(State state, boolean settled, double lastRatio, double averageRatio) { this.state = state; this.settled = settled; this.lastRatio = lastRatio; this.averageRatio = averageRatio; } public State getState() { return state; } public boolean isSettled() { return settled; } public double getLastRatio() { return lastRatio; } public double getAverageRatio() { return averageRatio; } @Override public String toString() { return "Result{" + "state=" + state + ", settled=" + settled + ", lastRatio=" + lastRatio + ", averageRatio=" + averageRatio + '}'; } } }
src/main/java/top/quantic/sentry/service/util/FlappingDetector.java
Add flapping detection
src/main/java/top/quantic/sentry/service/util/FlappingDetector.java
Add flapping detection
<ide><path>rc/main/java/top/quantic/sentry/service/util/FlappingDetector.java <add>package top.quantic.sentry.service.util; <add> <add>import org.slf4j.Logger; <add>import org.slf4j.LoggerFactory; <add> <add>import java.util.ArrayList; <add>import java.util.List; <add>import java.util.concurrent.ArrayBlockingQueue; <add>import java.util.function.Function; <add> <add>/** <add> * Simple implementation of a flapping detection algorithm. <add> * Ported from https://github.com/dclowd9901/FlappingDetection/blob/master/FlappingDetection.js <add> */ <add>public class FlappingDetector<T> { <add> <add> private static final Logger log = LoggerFactory.getLogger(FlappingDetector.class); <add> private static final int DEFAULT_CAPACITY = 100; <add> <add> private final ArrayBlockingQueue<T> states; <add> private final int capacity; <add> private final Function<T, Boolean> healthCheck; <add> private final List<FlappingListener> listenerList = new ArrayList<>(); <add> <add> private volatile State state = State.GOOD; <add> private volatile double lastRatio = 0; <add> private volatile double averageRatio = 1; <add> private volatile double windowMargin = 0.02; <add> private volatile double nominalRatio = 0.98; <add> private volatile boolean settled = false; <add> private volatile double recoveryThreshold; <add> <add> public FlappingDetector(Function<T, Boolean> healthCheck) { <add> this(healthCheck, DEFAULT_CAPACITY); <add> } <add> <add> public FlappingDetector(Function<T, Boolean> healthCheck, int maxStates) { <add> this.healthCheck = healthCheck; <add> this.capacity = maxStates; <add> this.states = new ArrayBlockingQueue<>(capacity, true); <add> } <add> <add> private double determineRatio() { <add> return ((double) states.stream().mapToInt(s -> healthCheck.apply(s) ? 
1 : 0).sum()) / states.size(); <add> } <add> <add> public void check(T value) { <add> states.offer(value); <add> double ratio = determineRatio(); <add> double ratioLow = lastRatio - windowMargin; <add> double ratioHigh = lastRatio + windowMargin; <add> <add> if (state == State.GOOD) { <add> // Services is in good state <add> if (ratio < nominalRatio) { <add> // Service dipped below viability <add> toBadState(); <add> } else { <add> if (states.size() == capacity) { <add> // Two shifts for cleanup after a recovery state <add> tryPoll(); <add> tryPoll(); <add> } <add> } <add> } else if (state == State.RECOVERY) { <add> // Service is not 100%, but it appears to be recovering <add> if (ratio > lastRatio) { <add> tryPoll(); <add> } <add> <add> // Service appears to have reached nominal status <add> if (ratio >= nominalRatio) { <add> averageRatio = nominalRatio; <add> toGoodState(); <add> } <add> } else { <add> // Service is in bad mode <add> // test if the ratios have settled <add> settled = ratio == lastRatio; <add> <add> if (settled) { <add> averageRatio = ratio; <add> } <add> <add> // test if ratio is upward bound <add> if (ratio > lastRatio) { <add> // A recovery threshold is the average between the average bad ratio and <add> // the nominal ratio. 
If the service's ratio surpasses this amount, it's deemed <add> // "in recovery" <add> recoveryThreshold = (nominalRatio - averageRatio) / 2 + averageRatio; <add> <add> if (ratio > recoveryThreshold) { <add> toRecoveryState(); <add> } <add> } <add> <add> if (ratio >= ratioLow && ratio <= ratioHigh) { <add> // maintain window length if ratio doesn't change drastically <add> tryPoll(); <add> } <add> } <add> <add> lastRatio = ratio; <add> log.debug("{}", toString()); <add> } <add> <add> private void tryPoll() { <add> if (!states.isEmpty()) { <add> states.poll(); <add> } <add> } <add> <add> private void toBadState() { <add> state = State.BAD; <add> notifyChange(); <add> } <add> <add> private void toRecoveryState() { <add> state = State.RECOVERY; <add> notifyChange(); <add> } <add> <add> private void toGoodState() { <add> state = State.GOOD; <add> notifyChange(); <add> } <add> <add> public void addListener(FlappingListener listener) { <add> listenerList.add(listener); <add> } <add> <add> public void removeListener(FlappingListener listener) { <add> listenerList.remove(listener); <add> } <add> <add> public void removeAllListeners() { <add> listenerList.clear(); <add> } <add> <add> private void notifyChange() { <add> listenerList.forEach(listener -> listener.onStateChange(new Snapshot(state, settled, lastRatio, averageRatio))); <add> } <add> <add> public State getState() { <add> return state; <add> } <add> <add> public double getAverageRatio() { <add> return averageRatio; <add> } <add> <add> public double getLastRatio() { <add> return lastRatio; <add> } <add> <add> public double getRecoveryThreshold() { <add> return recoveryThreshold; <add> } <add> <add> public int getCapacity() { <add> return capacity; <add> } <add> <add> public double getWindowMargin() { <add> return windowMargin; <add> } <add> <add> public void setWindowMargin(double windowMargin) { <add> this.windowMargin = windowMargin; <add> } <add> <add> public double getNominalRatio() { <add> return nominalRatio; 
<add> } <add> <add> public void setNominalRatio(double nominalRatio) { <add> this.nominalRatio = nominalRatio; <add> } <add> <add> @Override <add> public String toString() { <add> return "FlappingDetector{" + <add> "state=" + state + <add> ", lastRatio=" + lastRatio + <add> ", settled=" + settled + <add> ", recoveryThreshold=" + recoveryThreshold + <add> ", averageRatio=" + averageRatio + <add> '}'; <add> } <add> <add> public enum State { <add> GOOD, BAD, RECOVERY <add> } <add> <add> public interface FlappingListener { <add> void onStateChange(Snapshot snapshot); <add> } <add> <add> public static final class Snapshot { <add> private final State state; <add> private final boolean settled; <add> private final double lastRatio; <add> private final double averageRatio; <add> <add> private Snapshot(State state, boolean settled, double lastRatio, double averageRatio) { <add> this.state = state; <add> this.settled = settled; <add> this.lastRatio = lastRatio; <add> this.averageRatio = averageRatio; <add> } <add> <add> public State getState() { <add> return state; <add> } <add> <add> public boolean isSettled() { <add> return settled; <add> } <add> <add> public double getLastRatio() { <add> return lastRatio; <add> } <add> <add> public double getAverageRatio() { <add> return averageRatio; <add> } <add> <add> @Override <add> public String toString() { <add> return "Result{" + <add> "state=" + state + <add> ", settled=" + settled + <add> ", lastRatio=" + lastRatio + <add> ", averageRatio=" + averageRatio + <add> '}'; <add> } <add> } <add>}
Java
bsd-3-clause
3b9e9b839d1069515cf9f273b743f1998da3245a
0
lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon
/* * $Id: SqlStoredProcedures.java,v 1.3 2011-11-19 00:37:11 mellen22 Exp $ */ /* Copyright (c) 2000-2011 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. */ package org.lockss.util; import java.sql.Date; import java.text.SimpleDateFormat; import org.lockss.app.LockssDaemon; import org.lockss.config.ConfigManager; import org.lockss.config.Tdb; import org.lockss.config.TdbAu; import org.lockss.config.TdbPublisher; import org.lockss.config.TdbTitle; import org.lockss.daemon.TitleConfig; import org.lockss.plugin.ArchivalUnit; import org.lockss.plugin.CachedUrl; import org.lockss.plugin.PluginManager; /** * This utility class contains static methods that enable SQL stored * procedures to access LOCKSS functionality. 
* * @author pgust mellen * */ public class SqlStoredProcedures { static SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); /** * Constructor prevents creating instances. */ private SqlStoredProcedures() { } /** * Return the title from the title title database that corresponds * to the URL of an article in that title. * * @param articleUrl the URL of the article * @return the title for the given URL and null otherwise */ static TdbAu getTdbAuFromArticleUrl(String articleUrl) { if (articleUrl == null) { throw new IllegalArgumentException("null articleUrl"); } // get lockss daemon LockssDaemon daemon = LockssDaemon.getLockssDaemon(); if (daemon == null) { throw new IllegalStateException("no LOCKSS daemon"); } // get the CachedUrl from the article URL PluginManager pluginManager = daemon.getPluginManager(); CachedUrl cu = pluginManager.findCachedUrl(articleUrl); if (cu == null) { return null; } // get the AU from the CachedUrl ArchivalUnit au = cu.getArchivalUnit(); // return the TdbAu from the AU TitleConfig tc = au.getTitleConfig(); if (tc == null) { return null; } return tc.getTdbAu(); } /** * Return the title from the title database that corresponds * to the URL of an article in that title. * * @param articleUrl the URL of the article * @return the title for the given URL */ static public String getTitleFromArticleUrl(String articleUrl) { // get the TdbAu from the AU TdbAu tdbAu = getTdbAuFromArticleUrl(articleUrl); if (tdbAu == null) { throw new IllegalArgumentException( "No title for articleUrl " + articleUrl); } // get the title from the TdbAu String title = tdbAu.getJournalTitle(); // return the title return title; } /** * Return the publisher from the title database that corresponds * to the URL of an article in that publisher. 
* * @param articleUrl the URL of the article * @return the publisher for the given URL */ static public String getPublisherFromArticleUrl(String articleUrl) { // get the TdbAu from the AU TdbAu tdbAu = getTdbAuFromArticleUrl(articleUrl); if (tdbAu == null) { throw new IllegalArgumentException( "No publisher for articleUrl " + articleUrl); } // get the publisher from the TdbAu String publisher = tdbAu.getTdbPublisher().getName(); // return the publisher return publisher; } /** * Return the ingest date from the title database that corresponds * to the URL of an article in that publisher. * * @param articleUrl the URL of the article * @return the ingest date for the given URL */ static public String getDateOfIngestFromArticleUrl(String articleUrl) { // get lockss daemon LockssDaemon daemon = LockssDaemon.getLockssDaemon(); if (daemon == null) { throw new IllegalStateException("no LOCKSS daemon"); } // get the CachedUrl from the article URL PluginManager pluginManager = daemon.getPluginManager(); CachedUrl cu = pluginManager.findCachedUrl(articleUrl); if (cu == null) { return null; } // get the ingest date from the CachedUrl String ingestDate = cu.getProperties().getProperty(CachedUrl.PROPERTY_LAST_MODIFIED); if (ingestDate == null) { return null; } // get formatted date and return null if there is an exception try { long date = Long.parseLong(ingestDate); ingestDate = formatter.format(new Date(date)); return ingestDate; } catch (NumberFormatException ex) { return null; } } /** * Return the title from the title database that corresponds * to the ISSN of the journal. 
* * @param journalISSN the ISSN of the journal * @return the title for the given ISSN */ static public String getTitleFromISSN(String journalISSN) { if (journalISSN == null) { throw new IllegalArgumentException("null journalISSN"); } // get the tdb Tdb tdb = ConfigManager.getCurrentConfig().getTdb(); if (tdb == null) { throw new IllegalStateException("No tdb."); } // get the title from the ISSN TdbTitle title = tdb.getTdbTitleByIssn(journalISSN); // return the title return title == null ? null : title.getName(); } /** * Return the publisher from the publisher database that corresponds * to the ISSN of the journal. * * @param journalISSN the ISSN of the journal * @return the publisher for the given ISSN */ static public String getPublisherFromISSN(String journalISSN) { if (journalISSN == null) { throw new IllegalArgumentException("null journalISSN"); } Tdb tdb = ConfigManager.getCurrentConfig().getTdb(); if (tdb == null) { throw new IllegalStateException("No tdb."); } // get the publisher from the ISSN TdbPublisher publisher = tdb.getTdbTitleByIssn(journalISSN).getTdbPublisher(); // return the publisher return publisher == null ? null : publisher.getName(); } }
src/org/lockss/util/SqlStoredProcedures.java
/* * $Id: SqlStoredProcedures.java,v 1.2 2011-11-18 19:28:12 mellen22 Exp $ */ /* Copyright (c) 2000-2011 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. */ package org.lockss.util; import org.lockss.app.LockssDaemon; import org.lockss.config.TdbAu; import org.lockss.daemon.TitleConfig; import org.lockss.plugin.ArchivalUnit; import org.lockss.plugin.CachedUrl; import org.lockss.plugin.PluginManager; /** * This utility class contains static methods that enable SQL stored * procedures to access LOCKSS functionality. * * @author pgust * */ public class SqlStoredProcedures { /** * Constructor prevents creating instances. 
*/ private SqlStoredProcedures() { } /** * Return the title from the title title database that corresponds * to the URL of an article in that title. * * @param articleUrl the URL of the article * @return the title for the given URL and null otherwise */ static TdbAu getTdbAuFromArticleUrl(String articleUrl) { if (articleUrl == null) { throw new IllegalArgumentException("null articleUrl"); } // get lockss daemon LockssDaemon daemon = LockssDaemon.getLockssDaemon(); if (daemon == null) { throw new IllegalStateException("no LOCKSS daemon"); } // get the CachedUrl from the article URL PluginManager pluginManager = daemon.getPluginManager(); CachedUrl cu = pluginManager.findCachedUrl(articleUrl); if (cu == null) { return null; } // get the AU from the CachedUrl ArchivalUnit au = cu.getArchivalUnit(); // return the TdbAu from the AU TitleConfig tc = au.getTitleConfig(); if (tc == null) { return null; } return tc.getTdbAu(); } /** * Return the title from the title database that corresponds * to the URL of an article in that title. * * @param articleUrl the URL of the article * @return the title for the given URL */ static public String getTitleFromArticleUrl(String articleUrl) { // get the TdbAu from the AU TdbAu tdbAu = getTdbAuFromArticleUrl(articleUrl); if (tdbAu == null) { throw new IllegalArgumentException( "No title for articleUrl " + articleUrl); } // get the title from the TdbAu String title = tdbAu.getJournalTitle(); // return the title return title; } /** * Return the publisher from the title database that corresponds * to the URL of an article in that publisher. 
* * @param articleUrl the URL of the article * @return the publisher for the given URL */ static public String getPublisherFromArticleUrl(String articleUrl) { // get the TdbAu from the AU TdbAu tdbAu = getTdbAuFromArticleUrl(articleUrl); if (tdbAu == null) { throw new IllegalArgumentException( "No publisher for articleUrl " + articleUrl); } // get the publisher from the TdbAu String publisher = tdbAu.getTdbPublisher().getName(); // return the publisher return publisher; } }
New methods. git-svn-id: 293778eaa97c8c94097d610b1bd5133a8f478f36@13418 4f837ed2-42f5-46e7-a7a5-fa17313484d4
src/org/lockss/util/SqlStoredProcedures.java
New methods.
<ide><path>rc/org/lockss/util/SqlStoredProcedures.java <ide> /* <del> * $Id: SqlStoredProcedures.java,v 1.2 2011-11-18 19:28:12 mellen22 Exp $ <add> * $Id: SqlStoredProcedures.java,v 1.3 2011-11-19 00:37:11 mellen22 Exp $ <ide> */ <ide> <ide> /* <ide> */ <ide> package org.lockss.util; <ide> <add>import java.sql.Date; <add>import java.text.SimpleDateFormat; <ide> import org.lockss.app.LockssDaemon; <add>import org.lockss.config.ConfigManager; <add>import org.lockss.config.Tdb; <ide> import org.lockss.config.TdbAu; <add>import org.lockss.config.TdbPublisher; <add>import org.lockss.config.TdbTitle; <ide> import org.lockss.daemon.TitleConfig; <ide> import org.lockss.plugin.ArchivalUnit; <ide> import org.lockss.plugin.CachedUrl; <ide> * This utility class contains static methods that enable SQL stored <ide> * procedures to access LOCKSS functionality. <ide> * <del> * @author pgust <add> * @author pgust mellen <ide> * <ide> */ <ide> public class SqlStoredProcedures { <del> <add> static SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); <add> <ide> /** <ide> * Constructor prevents creating instances. <ide> */ <ide> // return the publisher <ide> return publisher; <ide> } <add> <add> /** <add> * Return the ingest date from the title database that corresponds <add> * to the URL of an article in that publisher. 
<add> * <add> * @param articleUrl the URL of the article <add> * @return the ingest date for the given URL <add> */ <add> static public String getDateOfIngestFromArticleUrl(String articleUrl) { <add> // get lockss daemon <add> LockssDaemon daemon = LockssDaemon.getLockssDaemon(); <add> if (daemon == null) { <add> throw new IllegalStateException("no LOCKSS daemon"); <add> } <add> <add> // get the CachedUrl from the article URL <add> PluginManager pluginManager = daemon.getPluginManager(); <add> CachedUrl cu = pluginManager.findCachedUrl(articleUrl); <add> if (cu == null) { <add> return null; <add> } <add> <add> // get the ingest date from the CachedUrl <add> String ingestDate = cu.getProperties().getProperty(CachedUrl.PROPERTY_LAST_MODIFIED); <add> if (ingestDate == null) { <add> return null; <add> } <add> <add> // get formatted date and return null if there is an exception <add> try { <add> long date = Long.parseLong(ingestDate); <add> ingestDate = formatter.format(new Date(date)); <add> return ingestDate; <add> } catch (NumberFormatException ex) { <add> return null; <add> } <add> } <add> <add> /** <add> * Return the title from the title database that corresponds <add> * to the ISSN of the journal. <add> * <add> * @param journalISSN the ISSN of the journal <add> * @return the title for the given ISSN <add> */ <add> static public String getTitleFromISSN(String journalISSN) { <add> if (journalISSN == null) { <add> throw new IllegalArgumentException("null journalISSN"); <add> } <add> <add> // get the tdb <add> Tdb tdb = ConfigManager.getCurrentConfig().getTdb(); <add> if (tdb == null) { <add> throw new IllegalStateException("No tdb."); <add> } <add> <add> // get the title from the ISSN <add> TdbTitle title = tdb.getTdbTitleByIssn(journalISSN); <add> <add> // return the title <add> return title == null ? null : title.getName(); <add> } <add> <add> /** <add> * Return the publisher from the publisher database that corresponds <add> * to the ISSN of the journal. 
<add> * <add> * @param journalISSN the ISSN of the journal <add> * @return the publisher for the given ISSN <add> */ <add> static public String getPublisherFromISSN(String journalISSN) { <add> if (journalISSN == null) { <add> throw new IllegalArgumentException("null journalISSN"); <add> } <add> <add> Tdb tdb = ConfigManager.getCurrentConfig().getTdb(); <add> if (tdb == null) { <add> throw new IllegalStateException("No tdb."); <add> } <add> <add> // get the publisher from the ISSN <add> TdbPublisher publisher = tdb.getTdbTitleByIssn(journalISSN).getTdbPublisher(); <add> <add> // return the publisher <add> return publisher == null ? null : publisher.getName(); <add> } <ide> <ide> }
Java
mit
bc907820aad0068de2a094c5d39bf84e539bfd77
0
SpongePowered/Sponge,SpongePowered/Sponge,SpongePowered/Sponge
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.spongepowered.test.changeblock; import com.google.inject.Inject; import net.kyori.adventure.text.TextComponent; import net.kyori.adventure.text.format.NamedTextColor; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Marker; import org.apache.logging.log4j.MarkerManager; import org.checkerframework.checker.nullness.qual.NonNull; import org.spongepowered.api.Sponge; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.block.BlockTypes; import org.spongepowered.api.command.Command; import org.spongepowered.api.command.CommandResult; import org.spongepowered.api.command.parameter.CommandContext; import org.spongepowered.api.data.Keys; import org.spongepowered.api.data.Transaction; import org.spongepowered.api.data.type.MatterStates; import org.spongepowered.api.event.Listener; import org.spongepowered.api.event.block.ChangeBlockEvent; import org.spongepowered.api.event.entity.DestructEntityEvent; import org.spongepowered.api.event.entity.HarvestEntityEvent; import org.spongepowered.api.event.entity.SpawnEntityEvent; import org.spongepowered.api.event.lifecycle.RegisterCommandEvent; import org.spongepowered.plugin.PluginContainer; import org.spongepowered.plugin.jvm.Plugin; import org.spongepowered.test.LoadableModule; @Plugin("changeblocktest") public class ChangeBlockTest implements LoadableModule { static final Marker marker = MarkerManager.getMarker("CHANGEBLOCK"); final PluginContainer plugin; boolean cancelAll = false; boolean waterProofRedstone = false; boolean printEntityHarvests = false; boolean printEntitySpawns = false; boolean printEntityDeaths = false; @Inject public ChangeBlockTest(final PluginContainer plugin) { this.plugin = plugin; } @Override public void enable(final CommandContext ctx) { Sponge.getEventManager().registerListeners(this.plugin, new ChangeBlockListener()); Sponge.getEventManager().registerListeners(this.plugin, new 
HarvestEntityListener()); Sponge.getEventManager().registerListeners(this.plugin, new SpawnEntityListener()); Sponge.getEventManager().registerListeners(this.plugin, new EntityDeathPrinter()); } @Listener public void registerCommands(final RegisterCommandEvent<Command.Parameterized> event) { event.register(this.plugin, Command.builder() .setExecutor(context -> { this.cancelAll = !this.cancelAll; final TextComponent newState = TextComponent.of(this.cancelAll ? "OFF" : "ON", this.cancelAll ? NamedTextColor.GREEN : NamedTextColor.RED); context.sendMessage(TextComponent.of("Turning Block Changes: ").append(newState)); return CommandResult.success(); }) .build(), "toggleBlockChanges" ); event.register(this.plugin, Command.builder() .setExecutor(context -> { this.waterProofRedstone = !this.waterProofRedstone; final TextComponent newState = TextComponent.of(this.waterProofRedstone ? "ON" : "OFF", this.waterProofRedstone ? NamedTextColor.GREEN : NamedTextColor.RED); context.sendMessage(TextComponent.of("Waterproof Redstone : ").append(newState)); return CommandResult.success(); }) .build(), "toggleRedstoneWaterProofing" ); event.register(this.plugin, Command.builder() .setExecutor(context -> { this.printEntityHarvests = !this.printEntityHarvests; final TextComponent newState = TextComponent.of(this.printEntityHarvests ? "ON" : "OFF", this.printEntityHarvests ? NamedTextColor.GREEN : NamedTextColor.RED); context.sendMessage(TextComponent.of("Logging Entity Harvests : ").append(newState)); return CommandResult.success(); }) .build(), "toggleEntityHarvestPrinting" ); event.register(this.plugin, Command.builder() .setExecutor(context -> { this.printEntityDeaths = !this.printEntityDeaths; final TextComponent newState = TextComponent.of(this.printEntityDeaths ? "ON" : "OFF", this.printEntityDeaths ? 
NamedTextColor.GREEN : NamedTextColor.RED); context.sendMessage(TextComponent.of("Logging Entity Harvests : ").append(newState)); return CommandResult.success(); }) .build(), "toggleEntityDeathPrinting" ); event.register(this.plugin, Command.builder() .setExecutor(context -> { this.printEntitySpawns = !this.printEntitySpawns; final TextComponent newState = TextComponent.of(this.printEntitySpawns ? "ON" : "OFF", this.printEntitySpawns ? NamedTextColor.GREEN : NamedTextColor.RED); context.sendMessage(TextComponent.of("Logging Entity Spawns : ").append(newState)); return CommandResult.success(); }) .build(), "toggleEntitySpawnPrinting" ); } public class HarvestEntityListener { @Listener public void onEntityHarvest(final HarvestEntityEvent event) { if (!ChangeBlockTest.this.printEntityHarvests) { return; } final Logger pluginLogger = ChangeBlockTest.this.plugin.getLogger(); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/*************"); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/* HarvestEntityEvent"); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ Cause:"); for (final Object o : event.getCause()) { pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ - " + o); } pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); } } public class SpawnEntityListener { @Listener public void onEntitySpawn(final SpawnEntityEvent event) { if (!ChangeBlockTest.this.printEntitySpawns) { return; } final Logger pluginLogger = ChangeBlockTest.this.plugin.getLogger(); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/*************"); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/* SpawnEntityEvent"); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ Cause:"); for (final Object o : event.getCause()) { pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ - " + o); } pluginLogger.log(Level.INFO, ChangeBlockTest.marker, 
"/"); } } public class EntityDeathPrinter { @Listener public void onEntitySpawn(final DestructEntityEvent.Death event) { if (!ChangeBlockTest.this.printEntityDeaths) { return; } final Logger pluginLogger = ChangeBlockTest.this.plugin.getLogger(); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/*************"); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/* DestructEntityEvent.Death"); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ Cause:"); for (final Object o : event.getCause()) { pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ - " + o); } pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); } } public class ChangeBlockListener { @Listener public void onChangeBlock(final ChangeBlockEvent.Post post) { final Logger pluginLogger = ChangeBlockTest.this.plugin.getLogger(); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/*************"); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/* ChangeBlockEvent"); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ Cause:"); for (final Object o : post.getCause()) { pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ - " + o); } pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); if (ChangeBlockTest.this.cancelAll && post.getCause().containsType(BlockSnapshot.class)) { post.setCancelled(true); } if (ChangeBlockTest.this.waterProofRedstone) { for (Transaction<@NonNull BlockSnapshot> transaction : post.getTransactions()) { final boolean wasRedstone = transaction.getOriginal().getState().getType() == BlockTypes.REDSTONE_WIRE.get(); final boolean becomesLiquid = transaction.getFinal().getState().get(Keys.MATTER_STATE).get() == MatterStates.LIQUID.get(); if (wasRedstone && becomesLiquid) { post.setCancelled(true); return; } } } } } }
testplugins/src/main/java/org/spongepowered/test/changeblock/ChangeBlockTest.java
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.spongepowered.test.changeblock; import com.google.inject.Inject; import net.kyori.adventure.text.TextComponent; import net.kyori.adventure.text.format.NamedTextColor; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Marker; import org.apache.logging.log4j.MarkerManager; import org.spongepowered.api.Sponge; import org.spongepowered.api.block.BlockSnapshot; import org.spongepowered.api.command.Command; import org.spongepowered.api.command.CommandResult; import org.spongepowered.api.command.parameter.CommandContext; import org.spongepowered.api.event.Listener; import org.spongepowered.api.event.block.ChangeBlockEvent; import org.spongepowered.api.event.lifecycle.RegisterCommandEvent; import org.spongepowered.plugin.PluginContainer; import org.spongepowered.plugin.jvm.Plugin; import org.spongepowered.test.LoadableModule; @Plugin("changeblocktest") public class ChangeBlockTest implements LoadableModule { private static final Marker marker = MarkerManager.getMarker("CHANGEBLOCK"); private final PluginContainer plugin; private boolean cancelAll = false; @Inject public ChangeBlockTest(final PluginContainer plugin) { this.plugin = plugin; } @Override public void enable(final CommandContext ctx) { Sponge.getEventManager().registerListeners(this.plugin, new ChangeBlockListener()); } @Listener public void registerCommands(final RegisterCommandEvent<Command.Parameterized> event) { event.register(this.plugin, Command.builder() .setExecutor(context -> { this.cancelAll = !this.cancelAll; final TextComponent newState = TextComponent.of(this.cancelAll ? "ON" : "OFF", this.cancelAll ? 
NamedTextColor.GREEN : NamedTextColor.RED); context.sendMessage(TextComponent.of("Turning Block Changes: ").append(newState)); return CommandResult.success(); }) .build(), "toggleBlockChanges" ); } public class ChangeBlockListener { @Listener public void onChangeBlock(final ChangeBlockEvent.Post post) { final Logger pluginLogger = ChangeBlockTest.this.plugin.getLogger(); pluginLogger.log(Level.FATAL, ChangeBlockTest.marker, "/*************"); pluginLogger.log(Level.FATAL, ChangeBlockTest.marker, "/* ChangeBlockEvent"); pluginLogger.log(Level.FATAL, ChangeBlockTest.marker, "/"); pluginLogger.log(Level.FATAL, ChangeBlockTest.marker, "/ Cause:"); for (final Object o : post.getCause()) { pluginLogger.log(Level.FATAL, ChangeBlockTest.marker, "/ - " + o); } pluginLogger.log(Level.FATAL, ChangeBlockTest.marker, "/"); if (ChangeBlockTest.this.cancelAll && post.getCause().containsType(BlockSnapshot.class)) { post.setCancelled(true); } } } }
Add enhanced logging to ChangeBlockTest to log our now-implemented events Signed-off-by: Gabriel Harris-Rouquette <[email protected]>
testplugins/src/main/java/org/spongepowered/test/changeblock/ChangeBlockTest.java
Add enhanced logging to ChangeBlockTest to log our now-implemented events
<ide><path>estplugins/src/main/java/org/spongepowered/test/changeblock/ChangeBlockTest.java <ide> import org.apache.logging.log4j.Logger; <ide> import org.apache.logging.log4j.Marker; <ide> import org.apache.logging.log4j.MarkerManager; <add>import org.checkerframework.checker.nullness.qual.NonNull; <ide> import org.spongepowered.api.Sponge; <ide> import org.spongepowered.api.block.BlockSnapshot; <add>import org.spongepowered.api.block.BlockTypes; <ide> import org.spongepowered.api.command.Command; <ide> import org.spongepowered.api.command.CommandResult; <ide> import org.spongepowered.api.command.parameter.CommandContext; <add>import org.spongepowered.api.data.Keys; <add>import org.spongepowered.api.data.Transaction; <add>import org.spongepowered.api.data.type.MatterStates; <ide> import org.spongepowered.api.event.Listener; <ide> import org.spongepowered.api.event.block.ChangeBlockEvent; <add>import org.spongepowered.api.event.entity.DestructEntityEvent; <add>import org.spongepowered.api.event.entity.HarvestEntityEvent; <add>import org.spongepowered.api.event.entity.SpawnEntityEvent; <ide> import org.spongepowered.api.event.lifecycle.RegisterCommandEvent; <ide> import org.spongepowered.plugin.PluginContainer; <ide> import org.spongepowered.plugin.jvm.Plugin; <ide> @Plugin("changeblocktest") <ide> public class ChangeBlockTest implements LoadableModule { <ide> <del> private static final Marker marker = MarkerManager.getMarker("CHANGEBLOCK"); <del> <del> private final PluginContainer plugin; <del> private boolean cancelAll = false; <add> static final Marker marker = MarkerManager.getMarker("CHANGEBLOCK"); <add> <add> final PluginContainer plugin; <add> boolean cancelAll = false; <add> boolean waterProofRedstone = false; <add> boolean printEntityHarvests = false; <add> boolean printEntitySpawns = false; <add> boolean printEntityDeaths = false; <ide> <ide> @Inject <ide> public ChangeBlockTest(final PluginContainer plugin) { <ide> @Override <ide> public void 
enable(final CommandContext ctx) { <ide> Sponge.getEventManager().registerListeners(this.plugin, new ChangeBlockListener()); <add> Sponge.getEventManager().registerListeners(this.plugin, new HarvestEntityListener()); <add> Sponge.getEventManager().registerListeners(this.plugin, new SpawnEntityListener()); <add> Sponge.getEventManager().registerListeners(this.plugin, new EntityDeathPrinter()); <ide> } <ide> <ide> @Listener <ide> event.register(this.plugin, Command.builder() <ide> .setExecutor(context -> { <ide> this.cancelAll = !this.cancelAll; <del> final TextComponent newState = TextComponent.of(this.cancelAll ? "ON" : "OFF", this.cancelAll ? NamedTextColor.GREEN : NamedTextColor.RED); <add> final TextComponent newState = TextComponent.of(this.cancelAll ? "OFF" : "ON", this.cancelAll ? NamedTextColor.GREEN : NamedTextColor.RED); <ide> context.sendMessage(TextComponent.of("Turning Block Changes: ").append(newState)); <ide> return CommandResult.success(); <ide> }) <ide> .build(), "toggleBlockChanges" <ide> ); <del> } <del> <add> event.register(this.plugin, Command.builder() <add> .setExecutor(context -> { <add> this.waterProofRedstone = !this.waterProofRedstone; <add> final TextComponent newState = TextComponent.of(this.waterProofRedstone ? "ON" : "OFF", this.waterProofRedstone ? NamedTextColor.GREEN : NamedTextColor.RED); <add> context.sendMessage(TextComponent.of("Waterproof Redstone : ").append(newState)); <add> return CommandResult.success(); <add> }) <add> .build(), "toggleRedstoneWaterProofing" <add> ); <add> event.register(this.plugin, Command.builder() <add> .setExecutor(context -> { <add> this.printEntityHarvests = !this.printEntityHarvests; <add> final TextComponent newState = TextComponent.of(this.printEntityHarvests ? "ON" : "OFF", this.printEntityHarvests ? 
NamedTextColor.GREEN : NamedTextColor.RED); <add> context.sendMessage(TextComponent.of("Logging Entity Harvests : ").append(newState)); <add> return CommandResult.success(); <add> }) <add> .build(), "toggleEntityHarvestPrinting" <add> ); <add> event.register(this.plugin, Command.builder() <add> .setExecutor(context -> { <add> this.printEntityDeaths = !this.printEntityDeaths; <add> final TextComponent newState = TextComponent.of(this.printEntityDeaths ? "ON" : "OFF", this.printEntityDeaths ? NamedTextColor.GREEN : NamedTextColor.RED); <add> context.sendMessage(TextComponent.of("Logging Entity Harvests : ").append(newState)); <add> return CommandResult.success(); <add> }) <add> .build(), "toggleEntityDeathPrinting" <add> ); <add> event.register(this.plugin, Command.builder() <add> .setExecutor(context -> { <add> this.printEntitySpawns = !this.printEntitySpawns; <add> final TextComponent newState = TextComponent.of(this.printEntitySpawns ? "ON" : "OFF", this.printEntitySpawns ? NamedTextColor.GREEN : NamedTextColor.RED); <add> context.sendMessage(TextComponent.of("Logging Entity Spawns : ").append(newState)); <add> return CommandResult.success(); <add> }) <add> .build(), "toggleEntitySpawnPrinting" <add> ); <add> } <add> <add> public class HarvestEntityListener { <add> <add> @Listener <add> public void onEntityHarvest(final HarvestEntityEvent event) { <add> if (!ChangeBlockTest.this.printEntityHarvests) { <add> return; <add> } <add> final Logger pluginLogger = ChangeBlockTest.this.plugin.getLogger(); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/*************"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/* HarvestEntityEvent"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ Cause:"); <add> for (final Object o : event.getCause()) { <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ - " + o); <add> } <add> pluginLogger.log(Level.INFO, 
ChangeBlockTest.marker, "/"); <add> } <add> } <add> <add> public class SpawnEntityListener { <add> @Listener <add> public void onEntitySpawn(final SpawnEntityEvent event) { <add> if (!ChangeBlockTest.this.printEntitySpawns) { <add> return; <add> } <add> final Logger pluginLogger = ChangeBlockTest.this.plugin.getLogger(); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/*************"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/* SpawnEntityEvent"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ Cause:"); <add> for (final Object o : event.getCause()) { <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ - " + o); <add> } <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); <add> } <add> } <add> <add> public class EntityDeathPrinter { <add> @Listener <add> public void onEntitySpawn(final DestructEntityEvent.Death event) { <add> if (!ChangeBlockTest.this.printEntityDeaths) { <add> return; <add> } <add> final Logger pluginLogger = ChangeBlockTest.this.plugin.getLogger(); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/*************"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/* DestructEntityEvent.Death"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ Cause:"); <add> for (final Object o : event.getCause()) { <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ - " + o); <add> } <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); <add> } <add> } <ide> <ide> public class ChangeBlockListener { <ide> @Listener <ide> public void onChangeBlock(final ChangeBlockEvent.Post post) { <ide> final Logger pluginLogger = ChangeBlockTest.this.plugin.getLogger(); <del> pluginLogger.log(Level.FATAL, ChangeBlockTest.marker, "/*************"); <del> pluginLogger.log(Level.FATAL, ChangeBlockTest.marker, "/* ChangeBlockEvent"); 
<del> pluginLogger.log(Level.FATAL, ChangeBlockTest.marker, "/"); <del> pluginLogger.log(Level.FATAL, ChangeBlockTest.marker, "/ Cause:"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/*************"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/* ChangeBlockEvent"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ Cause:"); <ide> for (final Object o : post.getCause()) { <del> pluginLogger.log(Level.FATAL, ChangeBlockTest.marker, "/ - " + o); <del> } <del> pluginLogger.log(Level.FATAL, ChangeBlockTest.marker, "/"); <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/ - " + o); <add> } <add> pluginLogger.log(Level.INFO, ChangeBlockTest.marker, "/"); <ide> if (ChangeBlockTest.this.cancelAll && post.getCause().containsType(BlockSnapshot.class)) { <ide> post.setCancelled(true); <ide> } <add> if (ChangeBlockTest.this.waterProofRedstone) { <add> for (Transaction<@NonNull BlockSnapshot> transaction : post.getTransactions()) { <add> final boolean wasRedstone = transaction.getOriginal().getState().getType() == BlockTypes.REDSTONE_WIRE.get(); <add> final boolean becomesLiquid = transaction.getFinal().getState().get(Keys.MATTER_STATE).get() == MatterStates.LIQUID.get(); <add> if (wasRedstone && becomesLiquid) { <add> post.setCancelled(true); <add> return; <add> } <add> } <add> <add> } <ide> } <ide> } <ide> }
Java
apache-2.0
475de9eca457677148a2ded57417eb6c8a7ccf54
0
sdeleuze/reactor-core,reactor/reactor-core,sdeleuze/reactor-core,sdeleuze/reactor-core,sdeleuze/reactor-core
/* * Copyright (c) 2011-2016 Pivotal Software Inc, All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package reactor.core.publisher; import java.util.Iterator; import java.util.Queue; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import org.reactivestreams.Publisher; import org.reactivestreams.Subscriber; import org.reactivestreams.Subscription; import reactor.core.flow.Fuseable; import reactor.core.queue.QueueSupplier; import reactor.core.state.Backpressurable; import reactor.core.state.Introspectable; import reactor.core.subscriber.BlockingIterable; import reactor.core.subscriber.ConsumerSubscriber; import reactor.core.subscriber.SignalEmitter; import reactor.core.subscriber.SubscriberWithContext; import reactor.core.timer.Timer; import reactor.core.util.Assert; import reactor.core.util.Logger; import reactor.core.util.PlatformDependent; import reactor.core.util.ReactiveStateUtils; import reactor.fn.BiConsumer; import reactor.fn.BiFunction; import reactor.fn.Consumer; import reactor.fn.Function; import reactor.fn.Supplier; import reactor.fn.tuple.Tuple; import reactor.fn.tuple.Tuple2; import reactor.fn.tuple.Tuple3; import reactor.fn.tuple.Tuple4; import reactor.fn.tuple.Tuple5; import reactor.fn.tuple.Tuple6; /** * A Reactive Streams {@link Publisher} with basic rx operators that emits 0 to N elements, and then completes * (successfully or with an error). 
* * <p> * <img width="640" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flux.png" alt=""> * <p> * * <p>It is intended to be used in implementations and return types. Input parameters should keep using raw * {@link Publisher} as much as possible. * * <p>If it is known that the underlying {@link Publisher} will emit 0 or 1 element, {@link Mono} should be used * instead. * * @author Sebastien Deleuze * @author Stephane Maldini * @see Mono * @since 2.5 */ public abstract class Flux<T> implements Publisher<T>, Introspectable { // ============================================================================================================== // Static Generators // ============================================================================================================== static final IdentityFunction IDENTITY_FUNCTION = new IdentityFunction(); static final Flux<?> EMPTY = from(Mono.empty()); /** * Select the fastest source who won the "ambiguous" race and emitted first onNext or onComplete or onError * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/amb.png" alt=""> * <p> <p> * * @param sources The competing source publishers * @param <I> The source type of the data sequence * * @return a new {@link Flux} eventually subscribed to one of the sources or empty */ @SuppressWarnings({"unchecked", "varargs"}) @SafeVarargs public static <I> Flux<I> amb(Publisher<? extends I>... 
sources) { return new FluxAmb<>(sources); } /** * Select the fastest source who won the "ambiguous" race and emitted first onNext or onComplete or onError * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/amb.png" alt=""> * <p> <p> * * @param sources The competing source publishers * @param <I> The source type of the data sequence * * @return a new {@link Flux} eventually subscribed to one of the sources or empty */ @SuppressWarnings("unchecked") public static <I> Flux<I> amb(Iterable<? extends Publisher<? extends I>> sources) { if (sources == null) { return empty(); } return new FluxAmb<>(sources); } /** * Concat all sources emitted as an onNext signal from a parent {@link Publisher}. * A complete signal from each source will delimit the individual sequences and will be eventually * passed to the returned {@link Publisher} which will stop listening if the main sequence has also completed. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concatinner.png" alt=""> * <p> * @param sources The {@link Publisher} of {@link Publisher} to concat * @param <I> The source type of the data sequence * * @return a new {@link Flux} concatenating all inner sources sequences until complete or error */ @SuppressWarnings("unchecked") public static <I> Flux<I> concat(Publisher<? extends Publisher<? extends I>> sources) { return new FluxFlatMap<>( sources, IDENTITY_FUNCTION, false, 1, QueueSupplier.<I>one(), PlatformDependent.XS_BUFFER_SIZE, QueueSupplier.<I>xs() ); } /** * Concat all sources pulled from the supplied * {@link Iterator} on {@link Publisher#subscribe} from the passed {@link Iterable} until {@link Iterator#hasNext} * returns false. A complete signal from each source will delimit the individual sequences and will be eventually * passed to the returned Publisher. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concat.png" alt=""> * <p> * @param sources The {@link Publisher} of {@link Publisher} to concat * @param <I> The source type of the data sequence * * @return a new {@link Flux} concatenating all source sequences */ public static <I> Flux<I> concat(Iterable<? extends Publisher<? extends I>> sources) { return concat(fromIterable(sources)); } /** * Concat all sources pulled from the given {@link Publisher} array. * A complete signal from each source will delimit the individual sequences and will be eventually * passed to the returned Publisher. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concat.png" alt=""> * <p> * @param sources The {@link Publisher} of {@link Publisher} to concat * @param <I> The source type of the data sequence * * @return a new {@link Flux} concatenating all source sequences */ @SafeVarargs @SuppressWarnings({"unchecked", "varargs"}) public static <I> Flux<I> concat(Publisher<? extends I>... sources) { if (sources == null || sources.length == 0) { return empty(); } if (sources.length == 1) { return from(sources[0]); } return concat(fromArray(sources)); } /** * Create a {@link Flux} reacting on each available {@link Subscriber} read derived with the passed {@link * Consumer}. If a previous request is still running, avoid recursion and extend the previous request iterations. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generateforeach.png" alt=""> * <p> * @param requestConsumer A {@link Consumer} invoked when available read with the target subscriber * @param <T> The type of the data sequence * * @return a new {@link Flux} */ public static <T> Flux<T> create(Consumer<SubscriberWithContext<T, Void>> requestConsumer) { return create(requestConsumer, null, null); } /** * Create a {@link Flux} reacting on each available {@link Subscriber} read derived with the passed {@link * Consumer}. If a previous request is still running, avoid recursion and extend the previous request iterations. * The argument {@code contextFactory} is executed once by new subscriber to generate a context shared by every * request calls. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generateforeach.png" alt=""> * <p> * @param requestConsumer A {@link Consumer} invoked when available read with the target subscriber * @param contextFactory A {@link Function} called for every new subscriber returning an immutable context (IO * connection...) * @param <T> The type of the data sequence * @param <C> The type of contextual information to be read by the requestConsumer * * @return a new {@link Flux} */ public static <T, C> Flux<T> create(Consumer<SubscriberWithContext<T, C>> requestConsumer, Function<Subscriber<? super T>, C> contextFactory) { return create(requestConsumer, contextFactory, null); } /** * Create a {@link Flux} reacting on each available {@link Subscriber} read derived with the passed {@link * Consumer}. If a previous request is still running, avoid recursion and extend the previous request iterations. * The argument {@code contextFactory} is executed once by new subscriber to generate a context shared by every * request calls. 
The argument {@code shutdownConsumer} is executed once by subscriber termination event (cancel, * onComplete, onError). * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generateforeach.png" alt=""> * <p> * @param requestConsumer A {@link Consumer} invoked when available read with the target subscriber * @param contextFactory A {@link Function} called once for every new subscriber returning an immutable context (IO * connection...) * @param shutdownConsumer A {@link Consumer} called once everytime a subscriber terminates: cancel, onComplete(), * onError() * @param <T> The type of the data sequence * @param <C> The type of contextual information to be read by the requestConsumer * * @return a new {@link Flux} */ public static <T, C> Flux<T> create(final Consumer<SubscriberWithContext<T, C>> requestConsumer, Function<Subscriber<? super T>, C> contextFactory, Consumer<C> shutdownConsumer) { Assert.notNull(requestConsumer, "A data producer must be provided"); return new FluxGenerate.FluxForEach<>(requestConsumer, contextFactory, shutdownConsumer); } /** * Run onNext, onComplete and onError on a supplied * {@link Consumer} {@link Runnable} scheduler e.g. {@link SchedulerGroup#call}. * * <p> * Typically used for fast publisher, slow consumer(s) scenarios. * It naturally combines with {@link SchedulerGroup#single} and {@link SchedulerGroup#async} which implement * fast async event loops. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dispatchon.png" alt=""> * <p> * {@code flux.dispatchOn(WorkQueueProcessor.create()).subscribe(Subscribers.unbounded()) } * * @param source the {@link Publisher} to dispatch asynchronously * @param scheduler a checked factory for {@link Consumer} of {@link Runnable} * @param delayError true if errors should be delayed after consuming any available backlog * @param prefetch the maximum in flight data to produce from the passed source {@link Publisher} * * @return a {@link Flux} consuming asynchronously */ public static <T> Flux<T> dispatchOn(Publisher<T> source, Callable<? extends Consumer<Runnable>> scheduler, boolean delayError, int prefetch, Supplier<? extends Queue<T>> queueProvider) { if (source instanceof Fuseable.ScalarSupplier) { @SuppressWarnings("unchecked") T value = ((Fuseable.ScalarSupplier<T>)source).get(); return new FluxPublishOnValue<>(value, scheduler, true); } return new FluxDispatchOn<>(source, scheduler, delayError, prefetch, queueProvider); } /** * Create a {@link Flux} that completes without emitting any item. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/empty.png" alt=""> * <p> * @param <T> the reified type of the target {@link Subscriber} * * @return an empty {@link Flux} */ @SuppressWarnings("unchecked") public static <T> Flux<T> empty() { return (Flux<T>) EMPTY; } /** * Create a {@link Flux} that completes with the specified error. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/error.png" alt=""> * <p> * @param error the error to signal to each {@link Subscriber} * @param <T> the reified type of the target {@link Subscriber} * * @return a new failed {@link Flux} */ public static <T> Flux<T> error(Throwable error) { return Mono.<T>error(error).flux(); } /** * Consume the passed * {@link Publisher} source and transform its sequence of T into a N sequences of V via the given {@link Function}. * The produced sequences {@link Publisher} will be merged back in the returned {@link Flux}. * The backpressure will apply using the provided bufferSize which will actively consume each sequence (and the * main one) and replenish its request cycle on a threshold free capacity. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flatmap.png" alt=""> * <p> * @param source the source to flatten * @param mapper the function to transform the upstream sequence into N sub-sequences * @param concurrency the maximum alive transformations at a given time * @param bufferSize the bounded capacity for each individual merged sequence * @param delayError Consume all pending sequence backlogs before replaying any captured error * @param <T> the source type * @param <V> the produced merged type * * @return a new merged {@link Flux} */ public static <T, V> Flux<V> flatMap( Publisher<? extends T> source, Function<? super T, ? extends Publisher<? extends V>> mapper, int concurrency, int bufferSize, boolean delayError) { return new FluxFlatMap<>( source, mapper, delayError, concurrency, QueueSupplier.<V>get(concurrency), bufferSize, QueueSupplier.<V>get(bufferSize) ); } /** * Expose the specified {@link Publisher} with the {@link Flux} API. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/from.png" alt=""> * <p> * @param source the source to decorate * @param <T> the source sequence type * * @return a new {@link Flux} */ @SuppressWarnings("unchecked") public static <T> Flux<T> from(Publisher<? extends T> source) { if (source instanceof Flux) { return (Flux<T>) source; } if (source instanceof Supplier) { T t = ((Supplier<T>) source).get(); if (t != null) { return just(t); } } return FluxSource.wrap(source); } /** * Create a {@link Flux} that emits the items contained in the provided {@link Iterable}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/fromarray.png" alt=""> * <p> * @param array the array to read data from * @param <T> the {@link Publisher} type to stream * * @return a new {@link Flux} */ public static <T> Flux<T> fromArray(T[] array) { if (array == null || array.length == 0) { return empty(); } if (array.length == 1) { return just(array[0]); } return new FluxArray<>(array); } /** * Create a {@link Flux} that emits the items contained in the provided {@link Iterable}. * A new iterator will be created for each subscriber. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/fromiterable.png" alt=""> * <p> * @param it the {@link Iterable} to read data from * @param <T> the {@link Iterable} type to stream * * @return a new {@link Flux} */ public static <T> Flux<T> fromIterable(Iterable<? extends T> it) { FluxGenerate.IterableSequencer<T> iterablePublisher = new FluxGenerate.IterableSequencer<>(it); return create(iterablePublisher, iterablePublisher); } /** * Create a {@link Flux} that emits the items contained in the provided {@link Tuple}. 
* * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/fromtuple.png" alt=""> * <p> <p> * * @param tuple the {@link Tuple} to read data from * * @return a new {@link Flux} */ public static Flux<Object> fromTuple(Tuple tuple) { return fromArray(tuple.toArray()); } /** * Create a {@link Flux} reacting on requests with the passed {@link BiConsumer}. The argument {@code * contextFactory} is executed once by new subscriber to generate a context shared by every request calls. The * argument {@code shutdownConsumer} is executed once by subscriber termination event (cancel, onComplete, * onError). * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generate.png" alt=""> * <p> * @param requestConsumer A {@link BiConsumer} with left argument request and right argument target subscriber * @param contextFactory A {@link Function} called once for every new subscriber returning an immutable context (IO * connection...) * @param shutdownConsumer A {@link Consumer} called once everytime a subscriber terminates: cancel, onComplete(), * onError() * @param <T> The type of the data sequence * @param <C> The type of contextual information to be read by the requestConsumer * * @return a fresh Reactive {@link Flux} publisher ready to be subscribed */ public static <T, C> Flux<T> generate(BiConsumer<Long, SubscriberWithContext<T, C>> requestConsumer, Function<Subscriber<? super T>, C> contextFactory, Consumer<C> shutdownConsumer) { return new FluxGenerate<>(new FluxGenerate.RecursiveConsumer<>(requestConsumer), contextFactory, shutdownConsumer); } /** * Create a new {@link Flux} that emits an ever incrementing long starting with 0 every N seconds on * the given timer. If demand is not produced in time, an onError will be signalled. The {@link Flux} will never * complete. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/interval.png" alt=""> * <p> * @param seconds The number of seconds to wait before the next increment * * @return a new timed {@link Flux} */ public static Flux<Long> interval(long seconds) { return interval(seconds, TimeUnit.SECONDS); } /** * Create a new {@link Flux} that emits an ever incrementing long starting with 0 every N period of time unit on * the global timer. If demand is not produced in time, an onError will be signalled. The {@link Flux} will never * complete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/interval.png" alt=""> * <p> * @param period The the time relative to given unit to wait before the next increment * @param unit The unit of time * * @return a new timed {@link Flux} */ public static Flux<Long> interval(long period, TimeUnit unit) { return interval(period, unit, Timer.global()); } /** * Create a new {@link Flux} that emits an ever incrementing long starting with 0 every N period of time unit on * the given timer. If demand is not produced in time, an onError will be signalled. The {@link Flux} will never * complete. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/interval.png" alt=""> * <p> * @param period The the time relative to given unit to wait before the next increment * @param unit The unit of time * @param timer a {@link Timer} instance * * @return a new timed {@link Flux} */ public static Flux<Long> interval(long period, TimeUnit unit, Timer timer) { long timespan = TimeUnit.MILLISECONDS.convert(period, unit); Assert.isTrue(timespan >= timer.period(), "The delay " + period + "ms cannot be less than the timer resolution" + "" + timer.period() + "ms"); return new FluxInterval(timer, period, unit, period); } /** * Create a new {@link Flux} that emits the specified items and then complete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/justn.png" alt=""> * <p> * @param data the consecutive data objects to emit * @param <T> the emitted data type * * @return a new {@link Flux} */ @SafeVarargs @SuppressWarnings("varargs") public static <T> Flux<T> just(T... data) { return fromArray(data); } /** * Create a new {@link Flux} that will only emit the passed data then onComplete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/just.png" alt=""> * <p> * @param data the unique data to emit * @param <T> the emitted data type * * @return a new {@link Flux} */ public static <T> Flux<T> just(T data) { return new FluxJust<>(data); } /** * Observe Reactive Streams signals matching the passed flags {@code options} and use {@link Logger} support to * handle trace * implementation. Default will * use the passed {@link Level} and java.util.logging. If SLF4J is available, it will be used instead. 
* * Options allow fine grained filtering of the traced signal, for instance to only capture onNext and onError: * <pre> * flux.log("category", Level.INFO, Logger.ON_NEXT | LOGGER.ON_ERROR) * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * <p> * @param source the source {@link Publisher} to log * @param category to be mapped into logger configuration (e.g. org.springframework.reactor). * @param level the level to enforce for this tracing Flux * @param options a flag option that can be mapped with {@link Logger#ON_NEXT} etc. * * @param <T> the {@link Subscriber} type target * * @return a logged {@link Flux} */ public static <T> Flux<T> log(Publisher<T> source, String category, Level level, int options) { return new FluxLog<>(source, category, level, options); } /** * Create a {@link Flux} that will transform all signals into a target type. OnError will be transformed into * completion signal after its mapping callback has been applied. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/mapsignal.png" alt=""> * <p> * @param source the source {@link Publisher} to map * @param mapperOnNext the {@link Function} to call on next data and returning the target transformed data * @param mapperOnError the {@link Function} to call on error signal and returning the target transformed data * @param mapperOnComplete the {@link Function} to call on complete signal and returning the target transformed data * @param <T> the input publisher type * @param <V> the output {@link Publisher} type target * * @return a new {@link Flux} */ public static <T, V> Flux<V> mapSignal(Publisher<T> source, Function<? super T, ? extends V> mapperOnNext, Function<Throwable, ? extends V> mapperOnError, Supplier<? 
extends V> mapperOnComplete) { return new FluxMapSignal<>(source, mapperOnNext, mapperOnError, mapperOnComplete); } /** * Merge emitted {@link Publisher} sequences by the passed {@link Publisher} into an interleaved merged sequence. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/mergeinner.png" alt=""> * <p> * @param source a {@link Publisher} of {@link Publisher} sequence to merge * @param <T> the merged type * * @return a merged {@link Flux} */ @SuppressWarnings("unchecked") public static <T> Flux<T> merge(Publisher<? extends Publisher<? extends T>> source) { return new FluxFlatMap<>( source, IDENTITY_FUNCTION, false, PlatformDependent.SMALL_BUFFER_SIZE, QueueSupplier.<T>small(), PlatformDependent.XS_BUFFER_SIZE, QueueSupplier.<T>xs() ); } /** * Merge emitted {@link Publisher} sequences from the passed {@link Iterable} into an interleaved merged sequence. * {@link Iterable#iterator()} will be called for each {@link Publisher#subscribe}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/merge.png" alt=""> * <p> * @param sources the {@link Iterable} to lazily iterate on {@link Publisher#subscribe(Subscriber)} * @param <I> The source type of the data sequence * * @return a fresh Reactive {@link Flux} publisher ready to be subscribed */ public static <I> Flux<I> merge(Iterable<? extends Publisher<? extends I>> sources) { return merge(fromIterable(sources)); } /** * Merge emitted {@link Publisher} sequences from the passed {@link Publisher} array into an interleaved merged * sequence. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/merge.png" alt=""> * <p> * @param sources the {@link Publisher} array to iterate on {@link Publisher#subscribe(Subscriber)} * @param <I> The source type of the data sequence * * @return a fresh Reactive {@link Flux} publisher ready to be subscribed */ @SafeVarargs @SuppressWarnings({"unchecked", "varargs"}) public static <I> Flux<I> merge(Publisher<? extends I>... sources) { if (sources == null || sources.length == 0) { return empty(); } if (sources.length == 1) { return from(sources[0]); } return merge(fromArray(sources)); } /** * Create a {@link Flux} that will never signal any data, error or completion signal. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/never.png" alt=""> * <p> * @param <T> the {@link Subscriber} type target * * @return a never completing {@link Flux} */ public static <T> Flux<T> never() { return FluxNever.instance(); } /** * Create a {@link Flux} that will fallback to the produced {@link Publisher} given an onError signal. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/onerrorresumewith.png" alt=""> * <p> * @param <T> the {@link Subscriber} type target * * @return a resilient {@link Flux} */ public static <T> Flux<T> onErrorResumeWith( Publisher<? extends T> source, Function<Throwable, ? extends Publisher<? extends T>> fallback) { return new FluxResume<>(source, fallback); } /** * Run subscribe, onSubscribe and request on a supplied * {@link Consumer} {@link Runnable} scheduler like {@link SchedulerGroup}. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/publishon.png" alt=""> * <p> * <p> * Typically used for slow publisher e.g., blocking IO, fast consumer(s) scenarios. * It naturally combines with {@link SchedulerGroup#io} which implements work-queue thread dispatching. * * <p> * {@code flux.publishOn(WorkQueueProcessor.create()).subscribe(Subscribers.unbounded()) } * * @param source a {@link Publisher} source to publish from the given scheduler * @param schedulerFactory a checked factory for {@link Consumer} of {@link Runnable} * * @return a {@link Flux} publishing asynchronously */ public static <T> Flux<T> publishOn(Publisher<? extends T> source, Callable<? extends Consumer<Runnable>> schedulerFactory) { return new FluxPublishOn<>(source, schedulerFactory); } /** * Create a {@link Flux} reacting on subscribe with the passed {@link Consumer}. The argument {@code * sessionConsumer} is executed once by new subscriber to generate a {@link SignalEmitter} context ready to accept * signals. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/yield.png" alt=""> * <p> * @param sessionConsumer A {@link Consumer} called once everytime a subscriber subscribes * @param <T> The type of the data sequence * * @return a fresh Reactive {@link Flux} publisher ready to be subscribed */ public static <T> Flux<T> yield(Consumer<? super SignalEmitter<T>> sessionConsumer) { return new FluxYieldingEmitter<>(sessionConsumer); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * <p> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param combinator The aggregate function that will receive a unique value from each upstream and return the * value to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <O> The produced output after transformation by the combinator * * @return a zipped {@link Flux} */ public static <T1, T2, O> Flux<O> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, final BiFunction<? super T1, ? super T2, ? extends O> combinator) { return zip(new Function<Object[], O>() { @Override @SuppressWarnings("unchecked") public O apply(Object[] tuple) { return combinator.apply((T1)tuple[0], (T2)tuple[1]); } }, source1, source2); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * <p> * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * * @return a zipped {@link Flux} */ public static <T1, T2> Flux<Tuple2<T1, T2>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2) { return zip(Tuple.<T1, T2>fn2(), source1, source2); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. 
The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * <p> * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * * @return a zipped {@link Flux} */ @SuppressWarnings("unchecked") public static <T1, T2, T3> Flux<Tuple3<T1, T2, T3>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3) { return zip(Tuple.<T1, T2, T3>fn3(), source1, source2, source3); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * <p> * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * * @return a zipped {@link Flux} */ @SuppressWarnings("unchecked") public static <T1, T2, T3, T4> Flux<Tuple4<T1, T2, T3, T4>> zip(Publisher<? extends T1> source1, Publisher<? 
extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4) { return zip(Tuple.<T1, T2, T3, T4>fn4(), source1, source2, source3, source4); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * <p> * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * * @return a zipped {@link Flux} */ @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5> Flux<Tuple5<T1, T2, T3, T4, T5>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5) { return zip(Tuple.<T1, T2, T3, T4, T5>fn5(), source1, source2, source3, source4, source5); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * <p> * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. 
* @param source3 The third upstream {@link Publisher} to subscribe to. * @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param source5 The fifth upstream {@link Publisher} to subscribe to. * @param source6 The sixth upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * @param <T6> type of the value from source6 * * @return a zipped {@link Flux} */ @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, T6> Flux<Tuple6<T1, T2, T3, T4, T5, T6>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5, Publisher<? extends T6> source6) { return zip(Tuple.<T1, T2, T3, T4, T5, T6>fn6(), source1, source2, source3, source4, source5, source6); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * of the most recent items emitted by each source until any of them completes. Errors will immediately be * forwarded. * The {@link Iterable#iterator()} will be called on each {@link Publisher#subscribe(Subscriber)}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * <p> * @param sources the {@link Iterable} to iterate on {@link Publisher#subscribe(Subscriber)} * * @return a zipped {@link Flux} */ @SuppressWarnings("unchecked") public static Flux<Tuple> zip(Iterable<? extends Publisher<?>> sources) { return zip(sources, Tuple.fnAny()); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. 
 Errors will immediately be forwarded.
 *
 * The {@link Iterable#iterator()} will be called on each {@link Publisher#subscribe(Subscriber)}.
 *
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt="">
 *
 * @param sources the {@link Iterable} to iterate on {@link Publisher#subscribe(Subscriber)}
 * @param combinator The aggregate function that will receive a unique value from each upstream and return the value
 * to signal downstream
 * @param <O> the combined produced type
 *
 * @return a zipped {@link Flux}
 */
public static <O> Flux<O> zip(Iterable<? extends Publisher<?>> sources,
        final Function<? super Object[], ? extends O> combinator) {
    // Use the default "extra small" prefetch for each inner source.
    return zip(sources, PlatformDependent.XS_BUFFER_SIZE, combinator);
}

/**
 * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations
 * produced by the passed combinator function of the
 * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded.
 *
 * The {@link Iterable#iterator()} will be called on each {@link Publisher#subscribe(Subscriber)}.
 *
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt="">
 *
 * @param sources the {@link Iterable} to iterate on {@link Publisher#subscribe(Subscriber)}
 * @param prefetch the inner source request size
 * @param combinator The aggregate function that will receive a unique value from each upstream and return the value
 * to signal downstream
 * @param <O> the combined produced type
 *
 * @return a zipped {@link Flux}, or an empty one if {@code sources} is null
 */
public static <O> Flux<O> zip(Iterable<? extends Publisher<?>> sources,
        int prefetch,
        final Function<? super Object[], ? extends O> combinator) {
    // Null sources is tolerated and treated as "nothing to zip".
    if (sources == null) {
        return empty();
    }

    return new FluxZip<>(sources, combinator, QueueSupplier.get(prefetch), prefetch);
}

/**
 * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations
 * produced by the passed combinator function of the
 * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt="">
 * <p>
 * @param combinator The aggregate function that will receive a unique value from each upstream and return the
 * value to signal downstream
 * @param sources the {@link Publisher} array to iterate on {@link Publisher#subscribe(Subscriber)}
 * @param <I> the type of the source values
 * @param <O> the combined produced type
 *
 * @return a zipped {@link Flux}
 */
@SafeVarargs
@SuppressWarnings("varargs")
public static <I, O> Flux<O> zip(
        final Function<? super Object[], ? extends O> combinator, Publisher<? extends I>... sources) {
    // Use the default "extra small" prefetch for each inner source.
    return zip(combinator, PlatformDependent.XS_BUFFER_SIZE, sources);
}

/**
 * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations
 * produced by the passed combinator function of the
 * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt="">
 * <p>
 * @param combinator The aggregate function that will receive a unique value from each upstream and return the
 * value to signal downstream
 * @param prefetch individual source request size
 * @param sources the {@link Publisher} array to iterate on {@link Publisher#subscribe(Subscriber)}
 * @param <I> the type of the source values
 * @param <O> the combined produced type
 *
 * @return a zipped {@link Flux}, or an empty one if {@code sources} is null
 */
@SafeVarargs
@SuppressWarnings("varargs")
public static <I, O> Flux<O> zip(
        final Function<? super Object[], ? extends O> combinator, int prefetch, Publisher<? extends I>... sources) {
    // Null sources is tolerated and treated as "nothing to zip".
    if (sources == null) {
        return empty();
    }

    return new FluxZip<>(sources, combinator, QueueSupplier.get(prefetch), prefetch);
}

// ==============================================================================================================
// Instance Operators
// ==============================================================================================================

/**
 * Default constructor; subclasses provide the actual {@link Publisher#subscribe(Subscriber)} behavior.
 */
protected Flux() {
}

/**
 * Immediately apply the given transformation to this {@link Flux} in order to generate a target {@link Publisher} type.
 *
 * {@code flux.as(Mono::from).subscribe(Subscribers.unbounded()) }
 *
 * @param transformer the {@link Function} to immediately map this {@link Flux} into a target {@link Publisher}
 * instance.
 * @param <V> the item type carried by the returned {@link Publisher}
 * @param <P> the returned {@link Publisher} sequence type
 *
 * @return a new {@link Flux}
 */
public final <V, P extends Publisher<V>> P as(Function<? super Flux<T>, P> transformer) {
    // Applied eagerly at assembly time, not at subscription time.
    return transformer.apply(this);
}

/**
 * Return a {@code Mono<Void>} that completes when this {@link Flux} completes.
 * This will actively ignore the sequence and only replay completion or error signals.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/after.png" alt="">
 * <p>
 * @return a new {@link Mono} replaying only this {@link Flux}'s terminal signal
 */
@SuppressWarnings("unchecked")
public final Mono<Void> after() {
    // Values are dropped by MonoIgnoreElements; only onComplete/onError reaches the subscriber,
    // hence the cast to Mono<Void> is safe.
    return (Mono<Void>)new MonoIgnoreElements<>(this);
}

/**
 * Emit from the fastest first sequence between this publisher and the given publisher
 *
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/amb.png" alt="">
 * <p>
 * @param other the {@link Publisher} to race with
 *
 * @return the fastest sequence
 */
public final Flux<T> ambWith(Publisher<? extends T> other) {
    return amb(this, other);
}

/**
 * Hint {@link Subscriber} to this {@link Flux} a preferred available capacity should be used.
 * {@link #toIterable()} can for instance use introspect this value to supply an appropriate queueing strategy.
 *
 * @param capacity the maximum capacity (in flight onNext) the return {@link Publisher} should expose
 *
 * @return a bounded {@link Flux}
 */
public final Flux<T> capacity(long capacity) {
    // FluxBounded only decorates getCapacity(); the data path is untouched.
    return new FluxBounded<>(this, capacity);
}

/**
 * Like {@link #flatMap(Function)}, but concatenate emissions instead of merging (no interleave).
 *
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concatmap.png" alt="">
 * <p>
 * @param mapper the function to transform this sequence of T into concated sequences of R
 * @param <R> the produced concated type
 *
 * @return a new {@link Flux}
 */
public final <R> Flux<R> concatMap(Function<? super T, ? extends Publisher<? extends R>> mapper) {
    // maxConcurrency of 1 is what turns flatMap's merge into a sequential concat.
    return new FluxFlatMap<>(
            this,
            mapper,
            false,
            1,
            QueueSupplier.<R>one(),
            PlatformDependent.XS_BUFFER_SIZE,
            QueueSupplier.<R>xs()
    );
}

/**
 * Concatenate emissions of this {@link Flux} with the provided {@link Publisher} (no interleave).
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concat.png" alt="">
 * <p>
 * @param other the {@link Publisher} sequence to concat after this {@link Flux}
 *
 * @return a new {@link Flux}
 */
public final Flux<T> concatWith(Publisher<? extends T> other) {
    return concat(this, other);
}

/**
 * Introspect this {@link Flux} graph
 *
 * @return {@link ReactiveStateUtils} {@literal Graph} representation of the operational flow
 */
public final ReactiveStateUtils.Graph debug() {
    return ReactiveStateUtils.scan(this);
}

/**
 * Provide a default unique value if this sequence is completed without any data
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/defaultifempty.png" alt="">
 * <p>
 * @param defaultV the alternate value if this sequence is empty
 *
 * @return a new {@link Flux}
 */
public final Flux<T> defaultIfEmpty(T defaultV) {
    return new FluxSwitchIfEmpty<>(this, just(defaultV));
}

/**
 * Run onNext, onComplete and onError on a supplied
 * {@link Consumer} {@link Runnable} scheduler factory like {@link SchedulerGroup}.
 *
 * <p>
 * Typically used for fast publisher, slow consumer(s) scenarios.
 * It naturally combines with {@link SchedulerGroup#single} and {@link SchedulerGroup#async} which implement
 * fast async event loops.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dispatchon.png" alt="">
 * <p>
 * {@code flux.dispatchOn(WorkQueueProcessor.create()).subscribe(Subscribers.unbounded()) }
 *
 * @param scheduler a checked factory for {@link Consumer} of {@link Runnable}
 *
 * @return a {@link Flux} consuming asynchronously
 */
public final Flux<T> dispatchOn(Callable<? extends Consumer<Runnable>> scheduler) {
    return dispatchOn(this, scheduler, true, PlatformDependent.XS_BUFFER_SIZE, QueueSupplier.<T>xs());
}

/**
 * Triggered after the {@link Flux} terminates, either by completing downstream successfully or with an error.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doafterterminate.png" alt="">
 * <p>
 * @param afterTerminate the callback to call after {@link Subscriber#onComplete} or {@link Subscriber#onError}
 *
 * @return a new unaltered {@link Flux}
 */
public final Flux<T> doAfterTerminate(Runnable afterTerminate) {
    // NOTE(review): afterTerminate is passed in the same FluxPeek constructor position as
    // doOnComplete's onComplete callback below, while doOnTerminate uses the next position.
    // These two look swapped — verify against FluxPeek's constructor parameter order.
    return new FluxPeek<>(this, null, null, null, afterTerminate, null, null, null);
}

/**
 * Triggered when the {@link Flux} is cancelled.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dooncancel.png" alt="">
 * <p>
 * @param onCancel the callback to call on {@link Subscription#cancel}
 *
 * @return a new unaltered {@link Flux}
 */
public final Flux<T> doOnCancel(Runnable onCancel) {
    return new FluxPeek<>(this, null, null, null, null, null, null, onCancel);
}

/**
 * Triggered when the {@link Flux} completes successfully.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dooncomplete.png" alt="">
 * <p>
 * @param onComplete the callback to call on {@link Subscriber#onComplete}
 *
 * @return a new unaltered {@link Flux}
 */
public final Flux<T> doOnComplete(Runnable onComplete) {
    return new FluxPeek<>(this, null, null, null, onComplete, null, null, null);
}

/**
 * Triggered when the {@link Flux} completes with an error.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonerror.png" alt="">
 * <p>
 * @param onError the callback to call on {@link Subscriber#onError}
 *
 * @return a new unaltered {@link Flux}
 */
public final Flux<T> doOnError(Consumer<? super Throwable> onError) {
    return new FluxPeek<>(this, null, null, onError, null, null, null, null);
}

/**
 * Triggered when the {@link Flux} emits an item.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonnext.png" alt="">
 * <p>
 * @param onNext the callback to call on {@link Subscriber#onNext}
 *
 * @return a new unaltered {@link Flux}
 */
public final Flux<T> doOnNext(Consumer<? super T> onNext) {
    return new FluxPeek<>(this, null, onNext, null, null, null, null, null);
}

/**
 * Triggered when the {@link Flux} is subscribed.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonsubscribe.png" alt="">
 * <p>
 * @param onSubscribe the callback to call on {@link Subscriber#onSubscribe}
 *
 * @return a new unaltered {@link Flux}
 */
public final Flux<T> doOnSubscribe(Consumer<? super Subscription> onSubscribe) {
    return new FluxPeek<>(this, onSubscribe, null, null, null, null, null, null);
}

/**
 * Triggered when the {@link Flux} terminates, either by completing successfully or with an error.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonterminate.png" alt="">
 * <p>
 * @param onTerminate the callback to call on {@link Subscriber#onComplete} or {@link Subscriber#onError}
 *
 * @return a new unaltered {@link Flux}
 */
public final Flux<T> doOnTerminate(Runnable onTerminate) {
    // NOTE(review): this uses the FluxPeek constructor position directly after the one used by
    // doOnComplete/doAfterTerminate above — see the note on doAfterTerminate; the slots look swapped.
    return new FluxPeek<>(this, null, null, null, null, onTerminate, null, null);
}

/**
 * Transform the items emitted by this {@link Flux} into Publishers, then flatten the emissions from those by
 * merging them into a single {@link Flux}, so that they may interleave.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flatmap.png" alt="">
 * <p>
 * @param mapper the {@link Function} to transform input sequence into N sequences {@link Publisher}
 * @param <R> the merged output sequence type
 *
 * @return a new {@link Flux}
 */
public final <R> Flux<R> flatMap(Function<? super T, ? extends Publisher<? extends R>> mapper) {
    // Unlike concatMap, allows SMALL_BUFFER_SIZE concurrent inner sources, hence interleaving.
    return new FluxFlatMap<>(
            this,
            mapper,
            false,
            PlatformDependent.SMALL_BUFFER_SIZE,
            QueueSupplier.<R>small(),
            PlatformDependent.XS_BUFFER_SIZE,
            QueueSupplier.<R>xs()
    );
}

/**
 * Transform the signals emitted by this {@link Flux} into Publishers, then flatten the emissions from those by
 * merging them into a single {@link Flux}, so that they may interleave.
 * OnError will be transformed into completion signal after its mapping callback has been applied.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flatmaps.png" alt="">
 * <p>
 * @param mapperOnNext the {@link Function} to call on next data and returning a sequence to merge
 * @param mapperOnError the {@link Function} to call on error signal and returning a sequence to merge
 * @param mapperOnComplete the {@link Function} to call on complete signal and returning a sequence to merge
 * @param <R> the output {@link Publisher} type target
 *
 * @return a new {@link Flux}
 */
@SuppressWarnings("unchecked")
public final <R> Flux<R> flatMap(Function<? super T, ? extends Publisher<? extends R>> mapperOnNext,
        Function<Throwable, ? extends Publisher<? extends R>> mapperOnError,
        Supplier<? extends Publisher<? extends R>> mapperOnComplete) {
    // FluxMapSignal turns each signal into a Publisher; the identity flatMap then flattens them.
    return new FluxFlatMap<>(
            new FluxMapSignal<>(this, mapperOnNext, mapperOnError, mapperOnComplete),
            Flux.IDENTITY_FUNCTION,
            false,
            PlatformDependent.SMALL_BUFFER_SIZE,
            QueueSupplier.<R>small(),
            PlatformDependent.XS_BUFFER_SIZE,
            QueueSupplier.<R>xs()
    );
}

@Override
public String getName() {
    // e.g. "FluxMap" -> "Map"; strips the common "Flux" prefix for introspection display.
    return getClass().getSimpleName()
                     .replace(Flux.class.getSimpleName(), "");
}

@Override
public int getMode() {
    return FACTORY;
}

/**
 * Create a {@link Flux} intercepting all source signals with the returned Subscriber that might choose to pass them
 * alone to the provided Subscriber (given to the returned {@code subscribe(Subscriber)}.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/lift.png" alt="">
 * <p>
 * @param lifter the function accepting the target {@link Subscriber} and returning the {@link Subscriber}
 * exposed this sequence
 * @param <R> the output operator type
 *
 * @return a new {@link Flux}
 */
public final <R> Flux<R> lift(Function<Subscriber<? super R>, Subscriber<? super T>> lifter) {
    return new FluxLift<>(this, lifter);
}

/**
 * Observe all Reactive Streams signals and use {@link Logger} support to handle trace implementation. Default will
 * use {@link Level#INFO} and java.util.logging. If SLF4J is available, it will be used instead.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt="">
 * <p>
 * The default log category will be "reactor.core.publisher.FluxLog".
 *
 * @return a new unaltered {@link Flux}
 */
public final Flux<T> log() {
    // Null category falls back to the default FluxLog category.
    return log(null, Level.INFO, Logger.ALL);
}

/**
 * Observe all Reactive Streams signals and use {@link Logger} support to handle trace implementation. Default will
 * use {@link Level#INFO} and java.util.logging. If SLF4J is available, it will be used instead.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt="">
 * <p>
 * @param category to be mapped into logger configuration (e.g. org.springframework.reactor).
 *
 * @return a new unaltered {@link Flux}
 */
public final Flux<T> log(String category) {
    return log(category, Level.INFO, Logger.ALL);
}

/**
 * Observe all Reactive Streams signals and use {@link Logger} support to handle trace implementation. Default will
 * use the passed {@link Level} and java.util.logging. If SLF4J is available, it will be used instead.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt="">
 * <p>
 * @param category to be mapped into logger configuration (e.g. org.springframework.reactor).
 * @param level the level to enforce for this tracing Flux
 *
 * @return a new unaltered {@link Flux}
 */
public final Flux<T> log(String category, Level level) {
    return log(category, level, Logger.ALL);
}

/**
 * Observe Reactive Streams signals matching the passed flags {@code options} and use {@link Logger} support to
 * handle trace
 * implementation. Default will
 * use the passed {@link Level} and java.util.logging. If SLF4J is available, it will be used instead.
 *
 * Options allow fine grained filtering of the traced signal, for instance to only capture onNext and onError:
 * <pre>
 * flux.log("category", Level.INFO, Logger.ON_NEXT | Logger.ON_ERROR)
 * </pre>
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt="">
 * <p>
 * @param category to be mapped into logger configuration (e.g. org.springframework.reactor).
 * @param level the level to enforce for this tracing Flux
 * @param options a flag option that can be mapped with {@link Logger#ON_NEXT} etc.
 *
 * @return a new unaltered {@link Flux}
 */
public final Flux<T> log(String category, Level level, int options) {
    return new FluxLog<>(this, category, level, options);
}

/**
 * Transform the items emitted by this {@link Flux} by applying a function to each item.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/map.png" alt="">
 * <p>
 * @param mapper the transforming {@link Function}
 * @param <R> the transformed type
 *
 * @return a new {@link Flux}
 */
public final <R> Flux<R> map(Function<? super T, ? extends R> mapper) {
    return new FluxMap<>(this, mapper);
}

/**
 * Merge emissions of this {@link Flux} with the provided {@link Publisher}, so that they may interleave.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/merge.png" alt="">
 * <p>
 * @param other the {@link Publisher} to merge with
 *
 * @return a new {@link Flux}
 */
public final Flux<T> mergeWith(Publisher<? extends T> other) {
    // just(this, other) emits the two publishers themselves; merge flattens them with interleaving.
    return merge(just(this, other));
}

/**
 * Emit only the first item emitted by this {@link Flux}.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/next.png" alt="">
 * <p>
 * If the sequence emits more than 1 data, emit {@link ArrayIndexOutOfBoundsException}.
 *
 * @return a new {@link Mono}
 */
public final Mono<T> next() {
    return new MonoNext<>(this);
}

/**
 * Subscribe to a returned fallback publisher when any error occurs.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/onerrorresumewith.png" alt="">
 * <p>
 * @param fallback the {@link Function} mapping the error to a new {@link Publisher} sequence
 *
 * @return a new {@link Flux}
 */
public final Flux<T> onErrorResumeWith(Function<Throwable, ? extends Publisher<? extends T>> fallback) {
    return new FluxResume<>(this, fallback);
}

/**
 * Fallback to the given value if an error is observed on this {@link Flux}
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/onerrorreturn.png" alt="">
 * <p>
 * @param fallbackValue alternate value on fallback
 *
 * @return a new {@link Flux}
 */
public final Flux<T> onErrorReturn(final T fallbackValue) {
    // A single-value fallback is just a fallback publisher of one element.
    return switchOnError(just(fallbackValue));
}

/**
 * Run subscribe, onSubscribe and request on a supplied
 * {@link Consumer} {@link Runnable} factory like {@link SchedulerGroup}.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/publishon.png" alt="">
 * <p>
 * <p>
 * Typically used for slow publisher e.g., blocking IO, fast consumer(s) scenarios.
 * It naturally combines with {@link SchedulerGroup#io} which implements work-queue thread dispatching.
 *
 * <p>
 * {@code flux.publishOn(WorkQueueProcessor.create()).subscribe(Subscribers.unbounded()) }
 *
 * @param schedulerFactory a checked factory for {@link Consumer} of {@link Runnable}
 *
 * @return a {@link Flux} publishing asynchronously
 */
public final Flux<T> publishOn(Callable<? extends Consumer<Runnable>> schedulerFactory) {
    return publishOn(this, schedulerFactory);
}

/**
 * Subscribe to the given fallback {@link Publisher} if an error is observed on this {@link Flux}
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/switchonerror.png" alt="">
 * <p>
 *
 * @param fallback the alternate {@link Publisher}
 *
 * @return a new {@link Flux}
 */
public final Flux<T> switchOnError(final Publisher<? extends T> fallback) {
    // FluxResume.create wraps the constant fallback as an error-to-publisher function.
    return onErrorResumeWith(FluxResume.create(fallback));
}

/**
 * Provide an alternative if this sequence is completed without any data
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/switchifempty.png" alt="">
 * <p>
 * @param alternate the alternate publisher if this sequence is empty
 *
 * @return a new {@link Flux}
 */
public final Flux<T> switchIfEmpty(Publisher<? extends T> alternate) {
    return new FluxSwitchIfEmpty<>(this, alternate);
}

/**
 * Start the chain and request unbounded demand.
 *
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/unbounded.png" alt="">
 * <p>
 *
 * @return a {@link Runnable} task to execute to dispose and cancel the underlying {@link Subscription}
 */
public final Runnable subscribe() {
    // ConsumerSubscriber requests Long.MAX_VALUE on subscribe and doubles as the cancel task.
    ConsumerSubscriber<T> s = new ConsumerSubscriber<>();
    subscribe(s);
    return s;
}

/**
 *
 * A chaining {@link Publisher#subscribe(Subscriber)} alternative to inline composition type conversion to a hot
 * emitter (e.g. reactor FluxProcessor Broadcaster and Promise or rxjava Subject).
 *
 * {@code flux.subscribeWith(WorkQueueProcessor.create()).subscribe(Subscribers.unbounded()) }
 *
 * @param subscriber the {@link Subscriber} to subscribe and return
 * @param <E> the reified type from the input/output subscriber
 *
 * @return the passed {@link Subscriber}
 */
public final <E extends Subscriber<? super T>> E subscribeWith(E subscriber) {
    subscribe(subscriber);
    return subscriber;
}

/**
 * Transform this {@link Flux} into a lazy {@link Iterable} blocking on next calls.
 *
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/toiterable.png" alt="">
 * <p>
 *
 * @return a blocking {@link Iterable}
 */
public final Iterable<T> toIterable() {
    // Use this Flux's declared capacity as the batch size when it exposes one, otherwise unbounded.
    return toIterable(this instanceof Backpressurable ? ((Backpressurable) this).getCapacity() : Long.MAX_VALUE
    );
}

/**
 * Transform this {@link Flux} into a lazy {@link Iterable} blocking on next calls.
 *
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/toiterablen.png" alt="">
 * <p>
 *
 * @param batchSize the demand to request to the upstream per iteration batch
 *
 * @return a blocking {@link Iterable}
 */
public final Iterable<T> toIterable(long batchSize) {
    return toIterable(batchSize, null);
}

/**
 * Transform this {@link Flux} into a lazy {@link Iterable} blocking on next calls.
 *
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/toiterablen.png" alt="">
 * <p>
 *
 * @param batchSize the demand to request to the upstream per iteration batch
 * @param queueProvider a supplier of the queue buffering each batch, defaulted from batchSize when null
 *
 * @return a blocking {@link Iterable}
 */
public final Iterable<T> toIterable(final long batchSize, Supplier<Queue<T>> queueProvider) {
    final Supplier<Queue<T>> provider;
    if(queueProvider == null){
        // Default to a queue sized for the requested batch.
        provider = QueueSupplier.get(batchSize);
    }
    else{
        provider = queueProvider;
    }
    return new BlockingIterable<>(this, batchSize, provider);
}

/**
 * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the
 * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt="">
 * <p>
 * @param source2 The second upstream {@link Publisher} to subscribe to.
 * @param <R> type of the value from source2
 *
 * @return a zipped {@link Flux}
 */
public final <R> Flux<Tuple2<T, R>> zipWith(Publisher<? extends R> source2) {
    return zip(this, source2);
}

/**
 * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations
 * produced by the passed combinator from the most recent items emitted by each source until any of them
 * completes. Errors will immediately be forwarded.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt="">
 * <p>
 * @param source2 The second upstream {@link Publisher} to subscribe to.
 * @param combinator The aggregate function that will receive a unique value from each upstream and return the value
 * to signal downstream
 * @param <R> type of the value from source2
 * @param <V> The produced output after transformation by the combinator
 *
 * @return a zipped {@link Flux}
 */
public final <R, V> Flux<V> zipWith(Publisher<? extends R> source2,
        final BiFunction<? super T, ? super R, ? extends V> combinator) {
    return zip(this, source2, combinator);
}

// ==============================================================================================================
// Containers
// ==============================================================================================================

/**
 * Decorate a {@link Flux} with a capacity for downstream accessors.
 * Only {@link #getCapacity()} is decorated; subscription is forwarded to the source untouched.
 *
 * @param <I> the sequence value type, unchanged by this decorator
 */
final static class FluxBounded<I> extends FluxSource<I, I> {

    final private long capacity;

    public FluxBounded(Publisher<I> source, long capacity) {
        super(source);
        this.capacity = capacity;
    }

    @Override
    public long getCapacity() {
        return capacity;
    }

    @Override
    public String getName() {
        return "Bounded";
    }

    @Override
    public void subscribe(Subscriber<? super I> s) {
        // Pass-through: the capacity hint carries no runtime behavior.
        source.subscribe(s);
    }
}

/**
 * i -> i ; the shared identity combinator used by flatten-style operators.
 */
static final class IdentityFunction implements Function {

    @Override
    public Object apply(Object o) {
        return o;
    }
}

}
// ==== concatenation marker: src/main/java/reactor/core/publisher/Flux.java (file header repeats below) ====
/* * Copyright (c) 2011-2016 Pivotal Software Inc, All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package reactor.core.publisher; import java.util.Iterator; import java.util.Queue; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import org.reactivestreams.Publisher; import org.reactivestreams.Subscriber; import org.reactivestreams.Subscription; import reactor.core.flow.Fuseable; import reactor.core.queue.QueueSupplier; import reactor.core.state.Backpressurable; import reactor.core.state.Introspectable; import reactor.core.subscriber.BlockingIterable; import reactor.core.subscriber.ConsumerSubscriber; import reactor.core.subscriber.SignalEmitter; import reactor.core.subscriber.SubscriberWithContext; import reactor.core.timer.Timer; import reactor.core.util.Assert; import reactor.core.util.Logger; import reactor.core.util.PlatformDependent; import reactor.core.util.ReactiveStateUtils; import reactor.fn.BiConsumer; import reactor.fn.BiFunction; import reactor.fn.Consumer; import reactor.fn.Function; import reactor.fn.Supplier; import reactor.fn.tuple.Tuple; import reactor.fn.tuple.Tuple2; import reactor.fn.tuple.Tuple3; import reactor.fn.tuple.Tuple4; import reactor.fn.tuple.Tuple5; import reactor.fn.tuple.Tuple6; /** * A Reactive Streams {@link Publisher} with basic rx operators that emits 0 to N elements, and then completes * (successfully or with an error). 
* * <p> * <img width="640" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flux.png" alt=""> * <p> * * <p>It is intended to be used in implementations and return types. Input parameters should keep using raw * {@link Publisher} as much as possible. * * <p>If it is known that the underlying {@link Publisher} will emit 0 or 1 element, {@link Mono} should be used * instead. * * @author Sebastien Deleuze * @author Stephane Maldini * @see Mono * @since 2.5 */ public abstract class Flux<T> implements Publisher<T>, Introspectable { // ============================================================================================================== // Static Generators // ============================================================================================================== static final IdentityFunction IDENTITY_FUNCTION = new IdentityFunction(); static final Flux<?> EMPTY = from(Mono.empty()); /** * Select the fastest source who won the "ambiguous" race and emitted first onNext or onComplete or onError * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/amb.png" alt=""> * <p> <p> * * @param sources The competing source publishers * @param <I> The source type of the data sequence * * @return a new {@link Flux} eventually subscribed to one of the sources or empty */ @SuppressWarnings({"unchecked", "varargs"}) @SafeVarargs public static <I> Flux<I> amb(Publisher<? extends I>... 
sources) { return new FluxAmb<>(sources); } /** * Select the fastest source who won the "ambiguous" race and emitted first onNext or onComplete or onError * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/amb.png" alt=""> * <p> <p> * * @param sources The competing source publishers * @param <I> The source type of the data sequence * * @return a new {@link Flux} eventually subscribed to one of the sources or empty */ @SuppressWarnings("unchecked") public static <I> Flux<I> amb(Iterable<? extends Publisher<? extends I>> sources) { if (sources == null) { return empty(); } return new FluxAmb<>(sources); } /** * Concat all sources emitted as an onNext signal from a parent {@link Publisher}. * A complete signal from each source will delimit the individual sequences and will be eventually * passed to the returned {@link Publisher} which will stop listening if the main sequence has also completed. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concatinner.png" alt=""> * <p> * @param sources The {@link Publisher} of {@link Publisher} to concat * @param <I> The source type of the data sequence * * @return a new {@link Flux} concatenating all inner sources sequences until complete or error */ @SuppressWarnings("unchecked") public static <I> Flux<I> concat(Publisher<? extends Publisher<? extends I>> sources) { return new FluxFlatMap<>( sources, IDENTITY_FUNCTION, false, 1, QueueSupplier.<I>one(), PlatformDependent.XS_BUFFER_SIZE, QueueSupplier.<I>xs() ); } /** * Concat all sources pulled from the supplied * {@link Iterator} on {@link Publisher#subscribe} from the passed {@link Iterable} until {@link Iterator#hasNext} * returns false. A complete signal from each source will delimit the individual sequences and will be eventually * passed to the returned Publisher. 
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concat.png" alt="">
 * <p>
 * @param sources The {@link Publisher} of {@link Publisher} to concat
 * @param <I> The source type of the data sequence
 *
 * @return a new {@link Flux} concatenating all source sequences
 */
public static <I> Flux<I> concat(Iterable<? extends Publisher<? extends I>> sources) {
    // Lazily iterate the Iterable at subscription time, then delegate to the inner-concat variant.
    return concat(fromIterable(sources));
}

/**
 * Concat all sources pulled from the given {@link Publisher} array.
 * A complete signal from each source will delimit the individual sequences and will be eventually
 * passed to the returned Publisher.
 * <p>
 * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concat.png" alt="">
 * <p>
 * @param sources The {@link Publisher} of {@link Publisher} to concat
 * @param <I> The source type of the data sequence
 *
 * @return a new {@link Flux} concatenating all source sequences
 */
@SafeVarargs
@SuppressWarnings({"unchecked", "varargs"})
public static <I> Flux<I> concat(Publisher<? extends I>... sources) {
    // Short-circuit the degenerate shapes: nothing to concat, or a single source.
    if (sources == null || sources.length == 0) {
        return empty();
    }
    if (sources.length == 1) {
        return from(sources[0]);
    }
    return concat(fromArray(sources));
}

/**
 * Create a {@link Flux} reacting on each available {@link Subscriber} read derived with the passed {@link
 * Consumer}. If a previous request is still running, avoid recursion and extend the previous request iterations.
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generateforeach.png" alt=""> * <p> * @param requestConsumer A {@link Consumer} invoked when available read with the target subscriber * @param <T> The type of the data sequence * * @return a new {@link Flux} */ public static <T> Flux<T> create(Consumer<SubscriberWithContext<T, Void>> requestConsumer) { return create(requestConsumer, null, null); } /** * Create a {@link Flux} reacting on each available {@link Subscriber} read derived with the passed {@link * Consumer}. If a previous request is still running, avoid recursion and extend the previous request iterations. * The argument {@code contextFactory} is executed once by new subscriber to generate a context shared by every * request calls. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generateforeach.png" alt=""> * <p> * @param requestConsumer A {@link Consumer} invoked when available read with the target subscriber * @param contextFactory A {@link Function} called for every new subscriber returning an immutable context (IO * connection...) * @param <T> The type of the data sequence * @param <C> The type of contextual information to be read by the requestConsumer * * @return a new {@link Flux} */ public static <T, C> Flux<T> create(Consumer<SubscriberWithContext<T, C>> requestConsumer, Function<Subscriber<? super T>, C> contextFactory) { return create(requestConsumer, contextFactory, null); } /** * Create a {@link Flux} reacting on each available {@link Subscriber} read derived with the passed {@link * Consumer}. If a previous request is still running, avoid recursion and extend the previous request iterations. * The argument {@code contextFactory} is executed once by new subscriber to generate a context shared by every * request calls. 
The argument {@code shutdownConsumer} is executed once by subscriber termination event (cancel, * onComplete, onError). * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generateforeach.png" alt=""> * <p> * @param requestConsumer A {@link Consumer} invoked when available read with the target subscriber * @param contextFactory A {@link Function} called once for every new subscriber returning an immutable context (IO * connection...) * @param shutdownConsumer A {@link Consumer} called once everytime a subscriber terminates: cancel, onComplete(), * onError() * @param <T> The type of the data sequence * @param <C> The type of contextual information to be read by the requestConsumer * * @return a new {@link Flux} */ public static <T, C> Flux<T> create(final Consumer<SubscriberWithContext<T, C>> requestConsumer, Function<Subscriber<? super T>, C> contextFactory, Consumer<C> shutdownConsumer) { Assert.notNull(requestConsumer, "A data producer must be provided"); return new FluxGenerate.FluxForEach<>(requestConsumer, contextFactory, shutdownConsumer); } /** * Run onNext, onComplete and onError on a supplied * {@link Consumer} {@link Runnable} scheduler e.g. {@link SchedulerGroup#call}. * * <p> * Typically used for fast publisher, slow consumer(s) scenarios. * It naturally combines with {@link SchedulerGroup#single} and {@link SchedulerGroup#async} which implement * fast async event loops. 
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dispatchon.png" alt="">
     * <p>
     * {@code flux.dispatchOn(WorkQueueProcessor.create()).subscribe(Subscribers.unbounded()) }
     *
     * @param source the {@link Publisher} to dispatch asynchronously
     * @param scheduler a checked factory for {@link Consumer} of {@link Runnable}
     * @param delayError true if errors should be delayed after consuming any available backlog
     * @param prefetch the maximum in flight data to produce from the passed source {@link Publisher}
     * @param queueProvider a {@link Supplier} of the {@link Queue} used to hold the in-flight backlog
     *
     * @return a {@link Flux} consuming asynchronously
     */
    public static <T> Flux<T> dispatchOn(Publisher<T> source, Callable<? extends Consumer<Runnable>> scheduler, boolean
            delayError, int prefetch, Supplier<? extends Queue<T>> queueProvider) {
        // Fast path: a scalar source carries exactly one already-known value, so no queueing is needed.
        if (source instanceof Fuseable.ScalarSupplier) {
            @SuppressWarnings("unchecked")
            T value = ((Fuseable.ScalarSupplier<T>)source).get();
            // NOTE(review): the third argument is hard-coded to true here, ignoring the delayError
            // parameter — confirm this is intentional for single-value sources.
            return new FluxPublishOnValue<>(value, scheduler, true);
        }

        return new FluxDispatchOn<>(source, scheduler, delayError, prefetch, queueProvider);
    }

    /**
     * Create a {@link Flux} that completes without emitting any item.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/empty.png" alt="">
     * <p>
     * @param <T> the reified type of the target {@link Subscriber}
     *
     * @return an empty {@link Flux}
     */
    @SuppressWarnings("unchecked")
    public static <T> Flux<T> empty() {
        // Shared immutable instance: the cast is safe because the sequence never emits a value.
        return (Flux<T>) EMPTY;
    }

    /**
     * Create a {@link Flux} that completes with the specified error.
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/error.png" alt=""> * <p> * @param error the error to signal to each {@link Subscriber} * @param <T> the reified type of the target {@link Subscriber} * * @return a new failed {@link Flux} */ public static <T> Flux<T> error(Throwable error) { return Mono.<T>error(error).flux(); } /** * Consume the passed * {@link Publisher} source and transform its sequence of T into a N sequences of V via the given {@link Function}. * The produced sequences {@link Publisher} will be merged back in the returned {@link Flux}. * The backpressure will apply using the provided bufferSize which will actively consume each sequence (and the * main one) and replenish its request cycle on a threshold free capacity. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flatmap.png" alt=""> * <p> * @param source the source to flatten * @param mapper the function to transform the upstream sequence into N sub-sequences * @param concurrency the maximum alive transformations at a given time * @param bufferSize the bounded capacity for each individual merged sequence * @param delayError Consume all pending sequence backlogs before replaying any captured error * @param <T> the source type * @param <V> the produced merged type * * @return a new merged {@link Flux} */ public static <T, V> Flux<V> flatMap( Publisher<? extends T> source, Function<? super T, ? extends Publisher<? extends V>> mapper, int concurrency, int bufferSize, boolean delayError) { return new FluxFlatMap<>( source, mapper, delayError, concurrency, QueueSupplier.<V>get(concurrency), bufferSize, QueueSupplier.<V>get(bufferSize) ); } /** * Expose the specified {@link Publisher} with the {@link Flux} API. 
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/from.png" alt="">
     * <p>
     * @param source the source to decorate
     * @param <T> the source sequence type
     *
     * @return a new {@link Flux}
     */
    @SuppressWarnings("unchecked")
    public static <T> Flux<T> from(Publisher<? extends T> source) {
        // Already a Flux: return it as-is instead of wrapping a second time.
        if (source instanceof Flux) {
            return (Flux<T>) source;
        }

        // A Supplier-backed publisher can be optimized to a scalar just(); a null supplied
        // value falls through to plain wrapping below.
        if (source instanceof Supplier) {
            T t = ((Supplier<T>) source).get();
            if (t != null) {
                return just(t);
            }
        }
        return FluxSource.wrap(source);
    }

    /**
     * Create a {@link Flux} that emits the items contained in the provided array.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/fromarray.png" alt="">
     * <p>
     * @param array the array to read data from
     * @param <T> the {@link Publisher} type to stream
     *
     * @return a new {@link Flux}
     */
    public static <T> Flux<T> fromArray(T[] array) {
        // Degenerate cases: no allocation for empty input, scalar path for a single item.
        if (array == null || array.length == 0) {
            return empty();
        }
        if (array.length == 1) {
            return just(array[0]);
        }
        return new FluxArray<>(array);
    }

    /**
     * Create a {@link Flux} that emits the items contained in the provided {@link Iterable}.
     * A new iterator will be created for each subscriber.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/fromiterable.png" alt="">
     * <p>
     * @param it the {@link Iterable} to read data from
     * @param <T> the {@link Iterable} type to stream
     *
     * @return a new {@link Flux}
     */
    public static <T> Flux<T> fromIterable(Iterable<? extends T> it) {
        // The sequencer acts both as the per-request consumer and the per-subscriber context factory.
        FluxGenerate.IterableSequencer<T> iterablePublisher = new FluxGenerate.IterableSequencer<>(it);
        return create(iterablePublisher, iterablePublisher);
    }

    /**
     * Create a {@link Flux} that emits the items contained in the provided {@link Tuple}.
* * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/fromtuple.png" alt=""> * <p> <p> * * @param tuple the {@link Tuple} to read data from * * @return a new {@link Flux} */ public static Flux<Object> fromTuple(Tuple tuple) { return fromArray(tuple.toArray()); } /** * Create a {@link Publisher} reacting on requests with the passed {@link BiConsumer}. The argument {@code * contextFactory} is executed once by new subscriber to generate a context shared by every request calls. The * argument {@code shutdownConsumer} is executed once by subscriber termination event (cancel, onComplete, * onError). * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/generate.png" alt=""> * <p> * @param requestConsumer A {@link BiConsumer} with left argument request and right argument target subscriber * @param contextFactory A {@link Function} called once for every new subscriber returning an immutable context (IO * connection...) * @param shutdownConsumer A {@link Consumer} called once everytime a subscriber terminates: cancel, onComplete(), * onError() * @param <T> The type of the data sequence * @param <C> The type of contextual information to be read by the requestConsumer * * @return a fresh Reactive {@link Flux} publisher ready to be subscribed */ public static <T, C> Flux<T> generate(BiConsumer<Long, SubscriberWithContext<T, C>> requestConsumer, Function<Subscriber<? super T>, C> contextFactory, Consumer<C> shutdownConsumer) { return new FluxGenerate<>(new FluxGenerate.RecursiveConsumer<>(requestConsumer), contextFactory, shutdownConsumer); } /** * Create a new {@link Flux} that emits an ever incrementing long starting with 0 every N seconds on * the given timer. If demand is not produced in time, an onError will be signalled. The {@link Flux} will never * complete. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/interval.png" alt=""> * <p> * @param seconds The number of seconds to wait before the next increment * * @return a new timed {@link Flux} */ public static Flux<Long> interval(long seconds) { return interval(seconds, TimeUnit.SECONDS); } /** * Create a new {@link Flux} that emits an ever incrementing long starting with 0 every N period of time unit on * the global timer. If demand is not produced in time, an onError will be signalled. The {@link Flux} will never * complete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/interval.png" alt=""> * <p> * @param period The the time relative to given unit to wait before the next increment * @param unit The unit of time * * @return a new timed {@link Flux} */ public static Flux<Long> interval(long period, TimeUnit unit) { return interval(period, unit, Timer.global()); } /** * Create a new {@link Flux} that emits an ever incrementing long starting with 0 every N period of time unit on * the given timer. If demand is not produced in time, an onError will be signalled. The {@link Flux} will never * complete. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/interval.png" alt=""> * <p> * @param period The the time relative to given unit to wait before the next increment * @param unit The unit of time * @param timer a {@link Timer} instance * * @return a new timed {@link Flux} */ public static Flux<Long> interval(long period, TimeUnit unit, Timer timer) { long timespan = TimeUnit.MILLISECONDS.convert(period, unit); Assert.isTrue(timespan >= timer.period(), "The delay " + period + "ms cannot be less than the timer resolution" + "" + timer.period() + "ms"); return new FluxInterval(timer, period, unit, period); } /** * Create a new {@link Flux} that emits the specified items and then complete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/justn.png" alt=""> * <p> * @param data the consecutive data objects to emit * @param <T> the emitted data type * * @return a new {@link Flux} */ @SafeVarargs @SuppressWarnings("varargs") public static <T> Flux<T> just(T... data) { return fromArray(data); } /** * Create a new {@link Flux} that will only emit the passed data then onComplete. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/just.png" alt=""> * <p> * @param data the unique data to emit * @param <T> the emitted data type * * @return a new {@link Flux} */ public static <T> Flux<T> just(T data) { return new FluxJust<>(data); } /** * Observe Reactive Streams signals matching the passed flags {@code options} and use {@link Logger} support to * handle trace * implementation. Default will * use the passed {@link Level} and java.util.logging. If SLF4J is available, it will be used instead. 
     *
     * Options allow fine grained filtering of the traced signal, for instance to only capture onNext and onError:
     * <pre>
     *     flux.log("category", Level.INFO, Logger.ON_NEXT | Logger.ON_ERROR)
     * </pre>
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt="">
     * <p>
     * @param source the source {@link Publisher} to log
     * @param category to be mapped into logger configuration (e.g. org.springframework.reactor).
     * @param level the level to enforce for this tracing Flux
     * @param options a flag option that can be mapped with {@link Logger#ON_NEXT} etc.
     *
     * @param <T> the {@link Subscriber} type target
     *
     * @return a logged {@link Flux}
     */
    public static <T> Flux<T> log(Publisher<T> source, String category, Level level, int options) {
        return new FluxLog<>(source, category, level, options);
    }

    /**
     * Create a {@link Flux} that will transform all signals into a target type. OnError will be transformed into
     * completion signal after its mapping callback has been applied.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/mapsignal.png" alt="">
     * <p>
     * @param source the source {@link Publisher} to map
     * @param mapperOnNext the {@link Function} to call on next data and returning the target transformed data
     * @param mapperOnError the {@link Function} to call on error signal and returning the target transformed data
     * @param mapperOnComplete the {@link Function} to call on complete signal and returning the target transformed data
     * @param <T> the input publisher type
     * @param <V> the output {@link Publisher} type target
     *
     * @return a new {@link Flux}
     */
    public static <T, V> Flux<V> mapSignal(Publisher<T> source, Function<? super T, ? extends V> mapperOnNext,
            Function<Throwable, ? extends V> mapperOnError, Supplier<? extends V> mapperOnComplete) {
        return new FluxMapSignal<>(source, mapperOnNext, mapperOnError, mapperOnComplete);
    }

    /**
     * Merge emitted {@link Publisher} sequences by the passed {@link Publisher} into an interleaved merged sequence.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/mergeinner.png" alt="">
     * <p>
     * @param source a {@link Publisher} of {@link Publisher} sequence to merge
     * @param <T> the merged type
     *
     * @return a merged {@link Flux}
     */
    @SuppressWarnings("unchecked")
    public static <T> Flux<T> merge(Publisher<? extends Publisher<? extends T>> source) {
        // Unbounded concurrency flatMap with identity mapping implements an interleaved merge.
        return new FluxFlatMap<>(
                source,
                IDENTITY_FUNCTION,
                false,
                PlatformDependent.SMALL_BUFFER_SIZE,
                QueueSupplier.<T>small(),
                PlatformDependent.XS_BUFFER_SIZE,
                QueueSupplier.<T>xs()
        );
    }

    /**
     * Merge emitted {@link Publisher} sequences from the passed {@link Iterable} into an interleaved merged sequence.
     * {@link Iterable#iterator()} will be called for each {@link Publisher#subscribe}.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/merge.png" alt="">
     * <p>
     * @param sources the {@link Iterable} to lazily iterate on {@link Publisher#subscribe(Subscriber)}
     * @param <I> The source type of the data sequence
     *
     * @return a fresh Reactive {@link Flux} publisher ready to be subscribed
     */
    public static <I> Flux<I> merge(Iterable<? extends Publisher<? extends I>> sources) {
        return merge(fromIterable(sources));
    }

    /**
     * Merge emitted {@link Publisher} sequences from the passed {@link Publisher} array into an interleaved merged
     * sequence.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/merge.png" alt="">
     * <p>
     * @param sources the {@link Publisher} array to iterate on {@link Publisher#subscribe(Subscriber)}
     * @param <I> The source type of the data sequence
     *
     * @return a fresh Reactive {@link Flux} publisher ready to be subscribed
     */
    @SafeVarargs
    @SuppressWarnings({"unchecked", "varargs"})
    public static <I> Flux<I> merge(Publisher<? extends I>... sources) {
        // Degenerate cases: nothing to merge, or a single source needing no merge machinery.
        if (sources == null || sources.length == 0) {
            return empty();
        }
        if (sources.length == 1) {
            return from(sources[0]);
        }
        return merge(fromArray(sources));
    }

    /**
     * Create a {@link Flux} that will never signal any data, error or completion signal.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/never.png" alt="">
     * <p>
     * @param <T> the {@link Subscriber} type target
     *
     * @return a never completing {@link Flux}
     */
    public static <T> Flux<T> never() {
        return FluxNever.instance();
    }

    /**
     * Create a {@link Flux} that will fallback to the produced {@link Publisher} given an onError signal.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/onerrorresumewith.png" alt="">
     * <p>
     * @param source the source {@link Publisher} to observe for errors
     * @param fallback the {@link Function} mapping the caught {@link Throwable} to a fallback {@link Publisher}
     * @param <T> the {@link Subscriber} type target
     *
     * @return a resilient {@link Flux}
     */
    public static <T> Flux<T> onErrorResumeWith(
            Publisher<? extends T> source,
            Function<Throwable, ? extends Publisher<? extends T>> fallback) {
        return new FluxResume<>(source, fallback);
    }

    /**
     * Run subscribe, onSubscribe and request on a supplied
     * {@link Consumer} {@link Runnable} scheduler like {@link SchedulerGroup}.
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/publishon.png" alt=""> * <p> * <p> * Typically used for slow publisher e.g., blocking IO, fast consumer(s) scenarios. * It naturally combines with {@link SchedulerGroup#io} which implements work-queue thread dispatching. * * <p> * {@code flux.publishOn(WorkQueueProcessor.create()).subscribe(Subscribers.unbounded()) } * * @param source a {@link Publisher} source to publish from the given scheduler * @param schedulers a checked factory for {@link Consumer} of {@link Runnable} * * @return a {@link Flux} publishing asynchronously */ public static <T> Flux<T> publishOn(Publisher<? extends T> source, Callable<? extends Consumer<Runnable>> schedulers) { return new FluxPublishOn<>(source, schedulers); } /** * Create a {@link Flux} reacting on subscribe with the passed {@link Consumer}. The argument {@code * sessionConsumer} is executed once by new subscriber to generate a {@link SignalEmitter} context ready to accept * signals. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/yield.png" alt=""> * <p> * @param sessionConsumer A {@link Consumer} called once everytime a subscriber subscribes * @param <T> The type of the data sequence * * @return a fresh Reactive {@link Flux} publisher ready to be subscribed */ public static <T> Flux<T> yield(Consumer<? super SignalEmitter<T>> sessionConsumer) { return new FluxYieldingEmitter<>(sessionConsumer); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * <p> * * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param combinator The aggregate function that will receive a unique value from each upstream and return the * value to signal downstream * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <O> The produced output after transformation by the combinator * * @return a zipped {@link Flux} */ public static <T1, T2, O> Flux<O> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, final BiFunction<? super T1, ? super T2, ? extends O> combinator) { return zip(new Function<Object[], O>() { @Override @SuppressWarnings("unchecked") public O apply(Object[] tuple) { return combinator.apply((T1)tuple[0], (T2)tuple[1]); } }, source1, source2); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * <p> * @param source1 The first upstream {@link Publisher} to subscribe to. * @param source2 The second upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * * @return a zipped {@link Flux} */ public static <T1, T2> Flux<Tuple2<T1, T2>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2) { return zip(Tuple.<T1, T2>fn2(), source1, source2); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. 
     * The operator will forward all combinations of the
     * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt="">
     * <p>
     * @param source1 The first upstream {@link Publisher} to subscribe to.
     * @param source2 The second upstream {@link Publisher} to subscribe to.
     * @param source3 The third upstream {@link Publisher} to subscribe to.
     * @param <T1> type of the value from source1
     * @param <T2> type of the value from source2
     * @param <T3> type of the value from source3
     *
     * @return a zipped {@link Flux}
     */
    @SuppressWarnings("unchecked")
    public static <T1, T2, T3> Flux<Tuple3<T1, T2, T3>> zip(Publisher<? extends T1> source1,
            Publisher<? extends T2> source2,
            Publisher<? extends T3> source3) {
        return zip(Tuple.<T1, T2, T3>fn3(), source1, source2, source3);
    }

    /**
     * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the
     * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt="">
     * <p>
     * @param source1 The first upstream {@link Publisher} to subscribe to.
     * @param source2 The second upstream {@link Publisher} to subscribe to.
     * @param source3 The third upstream {@link Publisher} to subscribe to.
     * @param source4 The fourth upstream {@link Publisher} to subscribe to.
     * @param <T1> type of the value from source1
     * @param <T2> type of the value from source2
     * @param <T3> type of the value from source3
     * @param <T4> type of the value from source4
     *
     * @return a zipped {@link Flux}
     */
    @SuppressWarnings("unchecked")
    public static <T1, T2, T3, T4> Flux<Tuple4<T1, T2, T3, T4>> zip(Publisher<? extends T1> source1,
            Publisher<? extends T2> source2,
            Publisher<? extends T3> source3,
            Publisher<? extends T4> source4) {
        return zip(Tuple.<T1, T2, T3, T4>fn4(), source1, source2, source3, source4);
    }

    /**
     * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the
     * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt="">
     * <p>
     * @param source1 The first upstream {@link Publisher} to subscribe to.
     * @param source2 The second upstream {@link Publisher} to subscribe to.
     * @param source3 The third upstream {@link Publisher} to subscribe to.
     * @param source4 The fourth upstream {@link Publisher} to subscribe to.
     * @param source5 The fifth upstream {@link Publisher} to subscribe to.
     * @param <T1> type of the value from source1
     * @param <T2> type of the value from source2
     * @param <T3> type of the value from source3
     * @param <T4> type of the value from source4
     * @param <T5> type of the value from source5
     *
     * @return a zipped {@link Flux}
     */
    @SuppressWarnings("unchecked")
    public static <T1, T2, T3, T4, T5> Flux<Tuple5<T1, T2, T3, T4, T5>> zip(Publisher<? extends T1> source1,
            Publisher<? extends T2> source2,
            Publisher<? extends T3> source3,
            Publisher<? extends T4> source4,
            Publisher<? extends T5> source5) {
        return zip(Tuple.<T1, T2, T3, T4, T5>fn5(), source1, source2, source3, source4, source5);
    }

    /**
     * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the
     * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded.
     * <p>
     * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt="">
     * <p>
     * @param source1 The first upstream {@link Publisher} to subscribe to.
     * @param source2 The second upstream {@link Publisher} to subscribe to.
     * @param source3 The third upstream {@link Publisher} to subscribe to.
* @param source4 The fourth upstream {@link Publisher} to subscribe to. * @param source5 The fifth upstream {@link Publisher} to subscribe to. * @param source6 The sixth upstream {@link Publisher} to subscribe to. * @param <T1> type of the value from source1 * @param <T2> type of the value from source2 * @param <T3> type of the value from source3 * @param <T4> type of the value from source4 * @param <T5> type of the value from source5 * @param <T6> type of the value from source6 */ @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, T6> Flux<Tuple6<T1, T2, T3, T4, T5, T6>> zip(Publisher<? extends T1> source1, Publisher<? extends T2> source2, Publisher<? extends T3> source3, Publisher<? extends T4> source4, Publisher<? extends T5> source5, Publisher<? extends T6> source6) { return zip(Tuple.<T1, T2, T3, T4, T5, T6>fn6(), source1, source2, source3, source4, source5, source6); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * of the most recent items emitted by each source until any of them completes. Errors will immediately be * forwarded. * The {@link Iterable#iterator()} will be called on each {@link Publisher#subscribe(Subscriber)}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * <p> * @param sources the {@link Iterable} to iterate on {@link Publisher#subscribe(Subscriber)} * * @return a zipped {@link Flux} */ @SuppressWarnings("unchecked") public static Flux<Tuple> zip(Iterable<? extends Publisher<?>> sources) { return zip(sources, Tuple.fnAny()); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. 
* * The {@link Iterable#iterator()} will be called on each {@link Publisher#subscribe(Subscriber)}. * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param sources the {@link Iterable} to iterate on {@link Publisher#subscribe(Subscriber)} * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <O> the combined produced type * * @return a zipped {@link Flux} */ public static <O> Flux<O> zip(Iterable<? extends Publisher<?>> sources, final Function<? super Object[], ? extends O> combinator) { return zip(sources, PlatformDependent.XS_BUFFER_SIZE, combinator); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * * The {@link Iterable#iterator()} will be called on each {@link Publisher#subscribe(Subscriber)}. * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * * @param sources the {@link Iterable} to iterate on {@link Publisher#subscribe(Subscriber)} * @param prefetch the inner source request size * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <O> the combined produced type * * @return a zipped {@link Flux} */ public static <O> Flux<O> zip(Iterable<? extends Publisher<?>> sources, int prefetch, final Function<? super Object[], ? extends O> combinator) { if (sources == null) { return empty(); } return new FluxZip<>(sources, combinator, QueueSupplier.get(prefetch), prefetch); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. 
The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * <p> * @param combinator The aggregate function that will receive a unique value from each upstream and return the * value to signal downstream * @param sources the {@link Publisher} array to iterate on {@link Publisher#subscribe(Subscriber)} * @param <O> the combined produced type * * @return a zipped {@link Flux} */ @SafeVarargs @SuppressWarnings("varargs") public static <I, O> Flux<O> zip( final Function<? super Object[], ? extends O> combinator, Publisher<? extends I>... sources) { return zip(combinator, PlatformDependent.XS_BUFFER_SIZE, sources); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator function of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * <p> * @param combinator The aggregate function that will receive a unique value from each upstream and return the * value to signal downstream * @param prefetch individual source request size * @param sources the {@link Publisher} array to iterate on {@link Publisher#subscribe(Subscriber)} * @param <O> the combined produced type * * @return a zipped {@link Flux} */ @SafeVarargs @SuppressWarnings("varargs") public static <I, O> Flux<O> zip( final Function<? super Object[], ? extends O> combinator, int prefetch, Publisher<? extends I>... 
sources) { if (sources == null) { return empty(); } return new FluxZip<>(sources, combinator, QueueSupplier.get(prefetch), prefetch); } // ============================================================================================================== // Instance Operators // ============================================================================================================== protected Flux() { } /** * Immediately apply the given transformation to this {@link Flux} in order to generate a target {@link Publisher} type. * * {@code flux.as(Mono::from).subscribe(Subscribers.unbounded()) } * * @param transformer the {@link Function} to immediately map this {@link Flux} into a target {@link Publisher} * instance. * @param <P> the returned {@link Publisher} sequence type * * @return a new {@link Flux} */ public final <V, P extends Publisher<V>> P as(Function<? super Flux<T>, P> transformer) { return transformer.apply(this); } /** * Return a {@code Mono<Void>} that completes when this {@link Flux} completes. * This will actively ignore the sequence and only replay completion or error signals. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/after.png" alt=""> * <p> * @return a new {@link Mono} */ @SuppressWarnings("unchecked") public final Mono<Void> after() { return (Mono<Void>)new MonoIgnoreElements<>(this); } /** * Emit from the fastest first sequence between this publisher and the given publisher * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/amb.png" alt=""> * <p> * @param other the {@link Publisher} to race with * * @return the fastest sequence */ public final Flux<T> ambWith(Publisher<? extends T> other) { return amb(this, other); } /** * Hint {@link Subscriber} to this {@link Flux} a preferred available capacity should be used. 
* {@link #toIterable()} can for instance use introspect this value to supply an appropriate queueing strategy. * * @param capacity the maximum capacity (in flight onNext) the return {@link Publisher} should expose * * @return a bounded {@link Flux} */ public final Flux<T> capacity(long capacity) { return new FluxBounded<>(this, capacity); } /** * Like {@link #flatMap(Function)}, but concatenate emissions instead of merging (no interleave). * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concatmap.png" alt=""> * <p> * @param mapper the function to transform this sequence of T into concated sequences of R * @param <R> the produced concated type * * @return a new {@link Flux} */ public final <R> Flux<R> concatMap(Function<? super T, ? extends Publisher<? extends R>> mapper) { return new FluxFlatMap<>( this, mapper, false, 1, QueueSupplier.<R>one(), PlatformDependent.XS_BUFFER_SIZE, QueueSupplier.<R>xs() ); } /** * Concatenate emissions of this {@link Flux} with the provided {@link Publisher} (no interleave). * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/concat.png" alt=""> * <p> * @param other the {@link Publisher} sequence to concat after this {@link Flux} * * @return a new {@link Flux} */ public final Flux<T> concatWith(Publisher<? 
extends T> other) { return concat(this, other); } /** * Introspect this {@link Flux} graph * * @return {@link ReactiveStateUtils} {@literal Graph} representation of the operational flow */ public final ReactiveStateUtils.Graph debug() { return ReactiveStateUtils.scan(this); } /** * Provide a default unique value if this sequence is completed without any data * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/defaultifempty.png" alt=""> * <p> * @param defaultV the alternate value if this sequence is empty * * @return a new {@link Flux} */ public final Flux<T> defaultIfEmpty(T defaultV) { return new FluxSwitchIfEmpty<>(this, just(defaultV)); } /** * Run onNext, onComplete and onError on a supplied * {@link Consumer} {@link Runnable} scheduler factory like {@link SchedulerGroup}. * * <p> * Typically used for fast publisher, slow consumer(s) scenarios. * It naturally combines with {@link SchedulerGroup#single} and {@link SchedulerGroup#async} which implement * fast async event loops. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dispatchon.png" alt=""> * <p> * {@code flux.dispatchOn(WorkQueueProcessor.create()).subscribe(Subscribers.unbounded()) } * * @param scheduler a checked factory for {@link Consumer} of {@link Runnable} * * @return a {@link Flux} consuming asynchronously */ public final Flux<T> dispatchOn(Callable<? extends Consumer<Runnable>> scheduler) { return dispatchOn(this, scheduler, true, PlatformDependent.XS_BUFFER_SIZE, QueueSupplier.<T>xs()); } /** * Triggered after the {@link Flux} terminates, either by completing downstream successfully or with an error. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doafterterminate.png" alt=""> * <p> * @param afterTerminate the callback to call after {@link Subscriber#onComplete} or {@link Subscriber#onError} * * @return a new unaltered {@link Flux} */ public final Flux<T> doAfterTerminate(Runnable afterTerminate) { return new FluxPeek<>(this, null, null, null, afterTerminate, null, null, null); } /** * Triggered when the {@link Flux} is cancelled. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dooncancel.png" alt=""> * <p> * @param onCancel the callback to call on {@link Subscription#cancel} * * @return a new unaltered {@link Flux} */ public final Flux<T> doOnCancel(Runnable onCancel) { return new FluxPeek<>(this, null, null, null, null, null, null, onCancel); } /** * Triggered when the {@link Flux} completes successfully. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/dooncomplete.png" alt=""> * <p> * @param onComplete the callback to call on {@link Subscriber#onComplete} * * @return a new unaltered {@link Flux} */ public final Flux<T> doOnComplete(Runnable onComplete) { return new FluxPeek<>(this, null, null, null, onComplete, null, null, null); } /** * Triggered when the {@link Flux} completes with an error. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonerror.png" alt=""> * <p> * @param onError the callback to call on {@link Subscriber#onError} * * @return a new unaltered {@link Flux} */ public final Flux<T> doOnError(Consumer<? super Throwable> onError) { return new FluxPeek<>(this, null, null, onError, null, null, null, null); } /** * Triggered when the {@link Flux} emits an item. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonnext.png" alt=""> * <p> * @param onNext the callback to call on {@link Subscriber#onNext} * * @return a new unaltered {@link Flux} */ public final Flux<T> doOnNext(Consumer<? super T> onNext) { return new FluxPeek<>(this, null, onNext, null, null, null, null, null); } /** * Triggered when the {@link Flux} is subscribed. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonsubscribe.png" alt=""> * <p> * @param onSubscribe the callback to call on {@link Subscriber#onSubscribe} * * @return a new unaltered {@link Flux} */ public final Flux<T> doOnSubscribe(Consumer<? super Subscription> onSubscribe) { return new FluxPeek<>(this, onSubscribe, null, null, null, null, null, null); } /** * Triggered when the {@link Flux} terminates, either by completing successfully or with an error. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/doonterminate.png" alt=""> * <p> * @param onTerminate the callback to call on {@link Subscriber#onComplete} or {@link Subscriber#onError} * * @return a new unaltered {@link Flux} */ public final Flux<T> doOnTerminate(Runnable onTerminate) { return new FluxPeek<>(this, null, null, null, null, onTerminate, null, null); } /** * Transform the items emitted by this {@link Flux} into Publishers, then flatten the emissions from those by * merging them into a single {@link Flux}, so that they may interleave. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flatmap.png" alt=""> * <p> * @param mapper the {@link Function} to transform input sequence into N sequences {@link Publisher} * @param <R> the merged output sequence type * * @return a new {@link Flux} */ public final <R> Flux<R> flatMap(Function<? super T, ? extends Publisher<? extends R>> mapper) { return new FluxFlatMap<>( this, mapper, false, PlatformDependent.SMALL_BUFFER_SIZE, QueueSupplier.<R>small(), PlatformDependent.XS_BUFFER_SIZE, QueueSupplier.<R>xs() ); } /** * Transform the signals emitted by this {@link Flux} into Publishers, then flatten the emissions from those by * merging them into a single {@link Flux}, so that they may interleave. * OnError will be transformed into completion signal after its mapping callback has been applied. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/flatmaps.png" alt=""> * <p> * @param mapperOnNext the {@link Function} to call on next data and returning a sequence to merge * @param mapperOnError the {@link Function} to call on error signal and returning a sequence to merge * @param mapperOnComplete the {@link Function} to call on complete signal and returning a sequence to merge * @param <R> the output {@link Publisher} type target * * @return a new {@link Flux} */ @SuppressWarnings("unchecked") public final <R> Flux<R> flatMap(Function<? super T, ? extends Publisher<? extends R>> mapperOnNext, Function<Throwable, ? extends Publisher<? extends R>> mapperOnError, Supplier<? extends Publisher<? 
extends R>> mapperOnComplete) { return new FluxFlatMap<>( new FluxMapSignal<>(this, mapperOnNext, mapperOnError, mapperOnComplete), Flux.IDENTITY_FUNCTION, false, PlatformDependent.SMALL_BUFFER_SIZE, QueueSupplier.<R>small(), PlatformDependent.XS_BUFFER_SIZE, QueueSupplier.<R>xs() ); } @Override public String getName() { return getClass().getSimpleName() .replace(Flux.class.getSimpleName(), ""); } @Override public int getMode() { return FACTORY; } /** * Create a {@link Flux} intercepting all source signals with the returned Subscriber that might choose to pass them * alone to the provided Subscriber (given to the returned {@code subscribe(Subscriber)}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/lift.png" alt=""> * <p> * @param lifter the function accepting the target {@link Subscriber} and returning the {@link Subscriber} * exposed this sequence * @param <R> the output operator type * * @return a new {@link Flux} */ public final <R> Flux<R> lift(Function<Subscriber<? super R>, Subscriber<? super T>> lifter) { return new FluxLift<>(this, lifter); } /** * Observe all Reactive Streams signals and use {@link Logger} support to handle trace implementation. Default will * use {@link Level#INFO} and java.util.logging. If SLF4J is available, it will be used instead. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * <p> * The default log category will be "reactor.core.publisher.FluxLog". * * @return a new unaltered {@link Flux} */ public final Flux<T> log() { return log(null, Level.INFO, Logger.ALL); } /** * Observe all Reactive Streams signals and use {@link Logger} support to handle trace implementation. Default will * use {@link Level#INFO} and java.util.logging. If SLF4J is available, it will be used instead. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * <p> * @param category to be mapped into logger configuration (e.g. org.springframework.reactor). * * @return a new unaltered {@link Flux} */ public final Flux<T> log(String category) { return log(category, Level.INFO, Logger.ALL); } /** * Observe all Reactive Streams signals and use {@link Logger} support to handle trace implementation. Default will * use the passed {@link Level} and java.util.logging. If SLF4J is available, it will be used instead. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * <p> * @param category to be mapped into logger configuration (e.g. org.springframework.reactor). * @param level the level to enforce for this tracing Flux * * @return a new unaltered {@link Flux} */ public final Flux<T> log(String category, Level level) { return log(category, level, Logger.ALL); } /** * Observe Reactive Streams signals matching the passed flags {@code options} and use {@link Logger} support to * handle trace * implementation. Default will * use the passed {@link Level} and java.util.logging. If SLF4J is available, it will be used instead. * * Options allow fine grained filtering of the traced signal, for instance to only capture onNext and onError: * <pre> * flux.log("category", Level.INFO, Logger.ON_NEXT | LOGGER.ON_ERROR) * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/log.png" alt=""> * <p> * @param category to be mapped into logger configuration (e.g. org.springframework.reactor). * @param level the level to enforce for this tracing Flux * @param options a flag option that can be mapped with {@link Logger#ON_NEXT} etc. 
* * @return a new unaltered {@link Flux} */ public final Flux<T> log(String category, Level level, int options) { return new FluxLog<>(this, category, level, options); } /** * Transform the items emitted by this {@link Flux} by applying a function to each item. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/map.png" alt=""> * <p> * @param mapper the transforming {@link Function} * @param <R> the transformed type * * @return a new {@link Flux} */ public final <R> Flux<R> map(Function<? super T, ? extends R> mapper) { return new FluxMap<>(this, mapper); } /** * Merge emissions of this {@link Flux} with the provided {@link Publisher}, so that they may interleave. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/merge.png" alt=""> * <p> * @param other the {@link Publisher} to merge with * * @return a new {@link Flux} */ public final Flux<T> mergeWith(Publisher<? extends T> other) { return merge(just(this, other)); } /** * Emit only the first item emitted by this {@link Flux}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/next.png" alt=""> * <p> * If the sequence emits more than 1 data, emit {@link ArrayIndexOutOfBoundsException}. * * @return a new {@link Mono} */ public final Mono<T> next() { return new MonoNext<>(this); } /** * Subscribe to a returned fallback publisher when any error occurs. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/onerrorresumewith.png" alt=""> * <p> * @param fallback the {@link Function} mapping the error to a new {@link Publisher} sequence * * @return a new {@link Flux} */ public final Flux<T> onErrorResumeWith(Function<Throwable, ? extends Publisher<? 
extends T>> fallback) { return new FluxResume<>(this, fallback); } /** * Fallback to the given value if an error is observed on this {@link Flux} * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/onerrorreturn.png" alt=""> * <p> * @param fallbackValue alternate value on fallback * * @return a new {@link Flux} */ public final Flux<T> onErrorReturn(final T fallbackValue) { return switchOnError(just(fallbackValue)); } /** * Run subscribe, onSubscribe and request on a supplied * {@link Consumer} {@link Runnable} factory like {@link SchedulerGroup}. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/publishon.png" alt=""> * <p> * <p> * Typically used for slow publisher e.g., blocking IO, fast consumer(s) scenarios. * It naturally combines with {@link SchedulerGroup#io} which implements work-queue thread dispatching. * * <p> * {@code flux.publishOn(WorkQueueProcessor.create()).subscribe(Subscribers.unbounded()) } * * @param schedulers a checked factory for {@link Consumer} of {@link Runnable} * * @return a {@link Flux} publishing asynchronously */ public final Flux<T> publishOn(Callable<? extends Consumer<Runnable>> schedulers) { return publishOn(this, schedulers); } /** * Subscribe to the given fallback {@link Publisher} if an error is observed on this {@link Flux} * * @param fallback the alternate {@link Publisher} * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/switchonerror.png" alt=""> * <p> * @return a new {@link Flux} */ public final Flux<T> switchOnError(final Publisher<? 
extends T> fallback) { return onErrorResumeWith(FluxResume.create(fallback)); } /** * Provide an alternative if this sequence is completed without any data * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/switchifempty.png" alt=""> * <p> * @param alternate the alternate publisher if this sequence is empty * * @return a new {@link Flux} */ public final Flux<T> switchIfEmpty(Publisher<? extends T> alternate) { return new FluxSwitchIfEmpty<>(this, alternate); } /** * Start the chain and request unbounded demand. * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/unbounded.png" alt=""> * <p> * * @return a {@link Runnable} task to execute to dispose and cancel the underlying {@link Subscription} */ public final Runnable subscribe() { ConsumerSubscriber<T> s = new ConsumerSubscriber<>(); subscribe(s); return s; } /** * * A chaining {@link Publisher#subscribe(Subscriber)} alternative to inline composition type conversion to a hot * emitter (e.g. reactor FluxProcessor Broadcaster and Promise or rxjava Subject). * * {@code flux.subscribeWith(WorkQueueProcessor.create()).subscribe(Subscribers.unbounded()) } * * @param subscriber the {@link Subscriber} to subscribe and return * @param <E> the reified type from the input/output subscriber * * @return the passed {@link Subscriber} */ public final <E extends Subscriber<? super T>> E subscribeWith(E subscriber) { subscribe(subscriber); return subscriber; } /** * Transform this {@link Flux} into a lazy {@link Iterable} blocking on next calls. * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/toiterable.png" alt=""> * <p> * * @return a blocking {@link Iterable} */ public final Iterable<T> toIterable() { return toIterable(this instanceof Backpressurable ? 
((Backpressurable) this).getCapacity() : Long.MAX_VALUE ); } /** * Transform this {@link Flux} into a lazy {@link Iterable} blocking on next calls. * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/toiterablen.png" alt=""> * <p> * * @return a blocking {@link Iterable} */ public final Iterable<T> toIterable(long batchSize) { return toIterable(batchSize, null); } /** * Transform this {@link Flux} into a lazy {@link Iterable} blocking on next calls. * * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/toiterablen.png" alt=""> * <p> * * @return a blocking {@link Iterable} */ public final Iterable<T> toIterable(final long batchSize, Supplier<Queue<T>> queueProvider) { final Supplier<Queue<T>> provider; if(queueProvider == null){ provider = QueueSupplier.get(batchSize); } else{ provider = queueProvider; } return new BlockingIterable<>(this, batchSize, provider); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations of the * most recent items emitted by each source until any of them completes. Errors will immediately be forwarded. * <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zipt.png" alt=""> * <p> * @param source2 The second upstream {@link Publisher} to subscribe to. * @param <R> type of the value from source2 * * @return a zipped {@link Flux} */ public final <R> Flux<Tuple2<T, R>> zipWith(Publisher<? extends R> source2) { return zip(this, source2); } /** * "Step-Merge" especially useful in Scatter-Gather scenarios. The operator will forward all combinations * produced by the passed combinator from the most recent items emitted by each source until any of them * completes. Errors will immediately be forwarded. 
* <p> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/zip.png" alt=""> * <p> * @param source2 The second upstream {@link Publisher} to subscribe to. * @param combinator The aggregate function that will receive a unique value from each upstream and return the value * to signal downstream * @param <R> type of the value from source2 * @param <V> The produced output after transformation by the combinator * * @return a zipped {@link Flux} */ public final <R, V> Flux<V> zipWith(Publisher<? extends R> source2, final BiFunction<? super T, ? super R, ? extends V> combinator) { return zip(this, source2, combinator); } // ============================================================================================================== // Containers // ============================================================================================================== /** * Decorate a {@link Flux} with a capacity for downstream accessors * * @param <I> */ final static class FluxBounded<I> extends FluxSource<I, I> { final private long capacity; public FluxBounded(Publisher<I> source, long capacity) { super(source); this.capacity = capacity; } @Override public long getCapacity() { return capacity; } @Override public String getName() { return "Bounded"; } @Override public void subscribe(Subscriber<? super I> s) { source.subscribe(s); } } /** * i -> i */ static final class IdentityFunction implements Function { @Override public Object apply(Object o) { return o; } } }
- Improved Javadoc for Flux
src/main/java/reactor/core/publisher/Flux.java
- Improved Javadoc for Flux
<ide><path>rc/main/java/reactor/core/publisher/Flux.java <ide> <ide> <ide> /** <del> * Create a {@link Publisher} reacting on requests with the passed {@link BiConsumer}. The argument {@code <add> * Create a {@link Flux} reacting on requests with the passed {@link BiConsumer}. The argument {@code <ide> * contextFactory} is executed once by new subscriber to generate a context shared by every request calls. The <ide> * argument {@code shutdownConsumer} is executed once by subscriber termination event (cancel, onComplete, <ide> * onError). <ide> * {@code flux.publishOn(WorkQueueProcessor.create()).subscribe(Subscribers.unbounded()) } <ide> * <ide> * @param source a {@link Publisher} source to publish from the given scheduler <del> * @param schedulers a checked factory for {@link Consumer} of {@link Runnable} <add> * @param schedulerFactory a checked factory for {@link Consumer} of {@link Runnable} <ide> * <ide> * @return a {@link Flux} publishing asynchronously <ide> */ <ide> public static <T> Flux<T> publishOn(Publisher<? extends T> source, <del> Callable<? extends Consumer<Runnable>> schedulers) { <del> return new FluxPublishOn<>(source, schedulers); <add> Callable<? extends Consumer<Runnable>> schedulerFactory) { <add> return new FluxPublishOn<>(source, schedulerFactory); <ide> } <ide> <ide> <ide> * @param <T1> type of the value from source1 <ide> * @param <T2> type of the value from source2 <ide> * @param <T3> type of the value from source3 <add> * <add> * @return a zipped {@link Flux} <ide> */ <ide> @SuppressWarnings("unchecked") <ide> public static <T1, T2, T3> Flux<Tuple3<T1, T2, T3>> zip(Publisher<? extends T1> source1, <ide> * @param <T2> type of the value from source2 <ide> * @param <T3> type of the value from source3 <ide> * @param <T4> type of the value from source4 <add> * <add> * @return a zipped {@link Flux} <ide> */ <ide> @SuppressWarnings("unchecked") <ide> public static <T1, T2, T3, T4> Flux<Tuple4<T1, T2, T3, T4>> zip(Publisher<? 
extends T1> source1, <ide> * @param <T3> type of the value from source3 <ide> * @param <T4> type of the value from source4 <ide> * @param <T5> type of the value from source5 <add> * <add> * @return a zipped {@link Flux} <ide> */ <ide> @SuppressWarnings("unchecked") <ide> public static <T1, T2, T3, T4, T5> Flux<Tuple5<T1, T2, T3, T4, T5>> zip(Publisher<? extends T1> source1, <ide> * @param <T4> type of the value from source4 <ide> * @param <T5> type of the value from source5 <ide> * @param <T6> type of the value from source6 <add> * <add> * @return a zipped {@link Flux} <ide> */ <ide> @SuppressWarnings("unchecked") <ide> public static <T1, T2, T3, T4, T5, T6> Flux<Tuple6<T1, T2, T3, T4, T5, T6>> zip(Publisher<? extends T1> source1, <ide> * <p> <ide> * {@code flux.publishOn(WorkQueueProcessor.create()).subscribe(Subscribers.unbounded()) } <ide> * <del> * @param schedulers a checked factory for {@link Consumer} of {@link Runnable} <add> * @param schedulerFactory a checked factory for {@link Consumer} of {@link Runnable} <ide> * <ide> * @return a {@link Flux} publishing asynchronously <ide> */ <del> public final Flux<T> publishOn(Callable<? extends Consumer<Runnable>> schedulers) { <del> return publishOn(this, schedulers); <add> public final Flux<T> publishOn(Callable<? 
extends Consumer<Runnable>> schedulerFactory) { <add> return publishOn(this, schedulerFactory); <ide> } <ide> <ide> /** <ide> * Subscribe to the given fallback {@link Publisher} if an error is observed on this {@link Flux} <add> * <p> <add> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/switchonerror.png" alt=""> <add> * <p> <ide> * <ide> * @param fallback the alternate {@link Publisher} <del> * <p> <del> * <img width="500" src="https://raw.githubusercontent.com/reactor/projectreactor.io/master/src/main/static/assets/img/marble/switchonerror.png" alt=""> <del> * <p> <add> * <ide> * @return a new {@link Flux} <ide> */ <ide> public final Flux<T> switchOnError(final Publisher<? extends T> fallback) {
Java
apache-2.0
60b6946905985447c6ed2545ba08ff55ff06d3f4
0
maoueh/Ektorp,helun/Ektorp,YannRobert/Ektorp,maoueh/Ektorp,Arcticwolf/Ektorp,YannRobert/Ektorp,helun/Ektorp,Arcticwolf/Ektorp
package org.ektorp.impl; import static java.lang.String.*; import java.io.*; import java.util.*; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import org.ektorp.*; import org.ektorp.http.*; import org.ektorp.util.*; import org.slf4j.*; /** * * @author henrik lundgren * */ public class StdCouchDbInstance implements CouchDbInstance { private final static Logger LOG = LoggerFactory.getLogger(StdCouchDbInstance.class); private final static TypeReference<List<String>> STRING_LIST_TYPE_DEF = new TypeReference<List<String>>() {}; private final HttpClient client; private final RestTemplate restTemplate; private final ObjectMapper objectMapper; private final ObjectMapperFactory objectMapperFactory; public StdCouchDbInstance(HttpClient client) { this(client, new StdObjectMapperFactory()); } public StdCouchDbInstance(HttpClient client, ObjectMapperFactory of) { Assert.notNull(client, "HttpClient may not be null"); Assert.notNull(of, "ObjectMapperFactory may not be null"); this.client = client; this.restTemplate = new RestTemplate(client); this.objectMapper = of.createObjectMapper(); this.objectMapperFactory = of; } public ObjectMapperFactory getObjectMapperFactory() { return objectMapperFactory; } public void createDatabase(String path) { createDatabase(DbPath.fromString(path)); } public void createDatabase(DbPath db) { if (checkIfDbExists(db)) { throw new DbAccessException(format("A database with path %s already exists", db.getPath())); } LOG.debug("creating db path: {}", db.getPath()); restTemplate.put(db.getPath()); } public void deleteDatabase(String path) { Assert.notNull(path); restTemplate.delete(DbPath.fromString(path).getPath()); } @Override public boolean checkIfDbExists(String path) { return checkIfDbExists(DbPath.fromString(path)); } @Override public boolean checkIfDbExists(DbPath db) { return restTemplate.head(db.getPath(), new 
StdResponseHandler<Boolean>() { @Override public Boolean error(HttpResponse hr) { if(hr.getCode() == HttpStatus.NOT_FOUND) { // only 404 is a valid response, anything else is an error // see http://docs.couchdb.org/en/latest/api/database/common.html#head--db return false; } throw StdResponseHandler.createDbAccessException(hr); } @Override public Boolean success(HttpResponse hr) throws Exception { return true; } }); } public List<String> getAllDatabases() { return restTemplate.get("/_all_dbs", new StdResponseHandler<List<String>>(){ @Override public List<String> success(HttpResponse hr) throws Exception { return objectMapper.readValue(hr.getContent(), STRING_LIST_TYPE_DEF); } }); } public ReplicationStatus replicate(ReplicationCommand cmd) { try { return restTemplate.post("/_replicate", objectMapper.writeValueAsString(cmd), new StdResponseHandler<ReplicationStatus>() { @Override public ReplicationStatus success(HttpResponse hr) throws Exception { return objectMapper.readValue(hr.getContent(), ReplicationStatus.class); } }); } catch (IOException e) { throw Exceptions.propagate(e); } } public HttpClient getConnection() { return client; } public CouchDbConnector createConnector(String path, boolean createIfNotExists) { CouchDbConnector db = new StdCouchDbConnector(path, this, objectMapperFactory); if (createIfNotExists) db.createDatabaseIfNotExists(); return db; } @Override public CouchDbConnector getReplicatorConnector() { return createConnector("_replicator", false); } @Override public <T> T getConfiguration(final Class<T> c) { return getConfiguration(c, null, null); } @Override public <T> T getConfiguration(final Class<T> c, String section) { return getConfiguration(c, section, null); } @Override public <T> T getConfiguration(final Class<T> c, String section, String key) { Assert.notNull(c, "Class may not be null"); String url = "/_config"; if(section != null) { url = url + "/" + section; if(key != null) { url = url + "/" + key; } } return restTemplate.get(url, new 
StdResponseHandler<T>() { @Override public T success(HttpResponse hr) throws Exception { return objectMapper.readValue(hr.getContent(), c); } }); } @Override public String getConfiguration(String section, String key) { return getConfiguration(String.class, section, key); } @Override public String setConfiguration(String section, String key, String value) { Assert.notNull(section, "Section may not be null"); Assert.notNull(key, "Key may not be null"); String url = "/_config/" + section + "/" + key; String content; try { content = objectMapper.writeValueAsString(value); } catch (JsonProcessingException e) { throw Exceptions.propagate(e); } return restTemplate.put(url, content, new StdResponseHandler<String>() { @Override public String success(HttpResponse hr) throws Exception { return objectMapper.readValue(hr.getContent(), String.class); } }); } @Override public String deleteConfiguration(String section, String key) { Assert.notNull(section, "Section may not be null"); Assert.notNull(key, "Key may not be null"); String url = "/_config/" + section + "/" + key; return restTemplate.delete(url, new StdResponseHandler<String>() { @Override public String success(HttpResponse hr) throws Exception { return objectMapper.readValue(hr.getContent(), String.class); } }); } @Override public Collection<ActiveTask> getActiveTasks() { String url = "/_active_tasks"; List<StdActiveTask> tasks = restTemplate.get(url, new StdResponseHandler<List<StdActiveTask>>() { @Override public List<StdActiveTask> success(HttpResponse hr) throws Exception { return objectMapper.readValue(hr.getContent(), new TypeReference<List<StdActiveTask>>() {}); } }); // We have to copy the list here because Java lacks covariance (i.e. we can't just return // the List<StdActiveTask> because it's not a Collection<ActiveTask>). Collection<ActiveTask> ret = new ArrayList<ActiveTask>(); for (StdActiveTask task : tasks) { ret.add(task); } return ret; } }
org.ektorp/src/main/java/org/ektorp/impl/StdCouchDbInstance.java
package org.ektorp.impl; import static java.lang.String.*; import java.io.*; import java.util.*; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import org.ektorp.*; import org.ektorp.http.*; import org.ektorp.util.*; import org.slf4j.*; /** * * @author henrik lundgren * */ public class StdCouchDbInstance implements CouchDbInstance { private final static Logger LOG = LoggerFactory.getLogger(StdCouchDbInstance.class); private final static TypeReference<List<String>> STRING_LIST_TYPE_DEF = new TypeReference<List<String>>() {}; private final HttpClient client; private final RestTemplate restTemplate; private final ObjectMapper objectMapper; private final ObjectMapperFactory objectMapperFactory; public StdCouchDbInstance(HttpClient client) { this(client, new StdObjectMapperFactory()); } public StdCouchDbInstance(HttpClient client, ObjectMapperFactory of) { Assert.notNull(client, "HttpClient may not be null"); Assert.notNull(of, "ObjectMapperFactory may not be null"); this.client = client; this.restTemplate = new RestTemplate(client); this.objectMapper = of.createObjectMapper(); this.objectMapperFactory = of; } public ObjectMapperFactory getObjectMapperFactory() { return objectMapperFactory; } public void createDatabase(String path) { createDatabase(DbPath.fromString(path)); } public void createDatabase(DbPath db) { if (checkIfDbExists(db)) { throw new DbAccessException(format("A database with path %s already exists", db.getPath())); } LOG.debug("creating db path: {}", db.getPath()); restTemplate.put(db.getPath()); } public void deleteDatabase(String path) { Assert.notNull(path); restTemplate.delete(DbPath.fromString(path).getPath()); } @Override public boolean checkIfDbExists(String path) { return checkIfDbExists(DbPath.fromString(path)); } @Override public boolean checkIfDbExists(DbPath db) { return restTemplate.head(db.getPath(), new 
StdResponseHandler<Boolean>() { @Override public Boolean error(HttpResponse hr) { return false; } @Override public Boolean success(HttpResponse hr) throws Exception { return true; } }); } public List<String> getAllDatabases() { return restTemplate.get("/_all_dbs", new StdResponseHandler<List<String>>(){ @Override public List<String> success(HttpResponse hr) throws Exception { return objectMapper.readValue(hr.getContent(), STRING_LIST_TYPE_DEF); } }); } public ReplicationStatus replicate(ReplicationCommand cmd) { try { return restTemplate.post("/_replicate", objectMapper.writeValueAsString(cmd), new StdResponseHandler<ReplicationStatus>() { @Override public ReplicationStatus success(HttpResponse hr) throws Exception { return objectMapper.readValue(hr.getContent(), ReplicationStatus.class); } }); } catch (IOException e) { throw Exceptions.propagate(e); } } public HttpClient getConnection() { return client; } public CouchDbConnector createConnector(String path, boolean createIfNotExists) { CouchDbConnector db = new StdCouchDbConnector(path, this, objectMapperFactory); if (createIfNotExists) db.createDatabaseIfNotExists(); return db; } @Override public CouchDbConnector getReplicatorConnector() { return createConnector("_replicator", false); } @Override public <T> T getConfiguration(final Class<T> c) { return getConfiguration(c, null, null); } @Override public <T> T getConfiguration(final Class<T> c, String section) { return getConfiguration(c, section, null); } @Override public <T> T getConfiguration(final Class<T> c, String section, String key) { Assert.notNull(c, "Class may not be null"); String url = "/_config"; if(section != null) { url = url + "/" + section; if(key != null) { url = url + "/" + key; } } return restTemplate.get(url, new StdResponseHandler<T>() { @Override public T success(HttpResponse hr) throws Exception { return objectMapper.readValue(hr.getContent(), c); } }); } @Override public String getConfiguration(String section, String key) { return 
getConfiguration(String.class, section, key); } @Override public String setConfiguration(String section, String key, String value) { Assert.notNull(section, "Section may not be null"); Assert.notNull(key, "Key may not be null"); String url = "/_config/" + section + "/" + key; String content; try { content = objectMapper.writeValueAsString(value); } catch (JsonProcessingException e) { throw Exceptions.propagate(e); } return restTemplate.put(url, content, new StdResponseHandler<String>() { @Override public String success(HttpResponse hr) throws Exception { return objectMapper.readValue(hr.getContent(), String.class); } }); } @Override public String deleteConfiguration(String section, String key) { Assert.notNull(section, "Section may not be null"); Assert.notNull(key, "Key may not be null"); String url = "/_config/" + section + "/" + key; return restTemplate.delete(url, new StdResponseHandler<String>() { @Override public String success(HttpResponse hr) throws Exception { return objectMapper.readValue(hr.getContent(), String.class); } }); } @Override public Collection<ActiveTask> getActiveTasks() { String url = "/_active_tasks"; List<StdActiveTask> tasks = restTemplate.get(url, new StdResponseHandler<List<StdActiveTask>>() { @Override public List<StdActiveTask> success(HttpResponse hr) throws Exception { return objectMapper.readValue(hr.getContent(), new TypeReference<List<StdActiveTask>>() {}); } }); // We have to copy the list here because Java lacks covariance (i.e. we can't just return // the List<StdActiveTask> because it's not a Collection<ActiveTask>). Collection<ActiveTask> ret = new ArrayList<ActiveTask>(); for (StdActiveTask task : tasks) { ret.add(task); } return ret; } }
Added 404 handling to checkIfDbExists. This fixes #159.
org.ektorp/src/main/java/org/ektorp/impl/StdCouchDbInstance.java
Added 404 handling to checkIfDbExists. This fixes #159.
<ide><path>rg.ektorp/src/main/java/org/ektorp/impl/StdCouchDbInstance.java <ide> return restTemplate.head(db.getPath(), new StdResponseHandler<Boolean>() { <ide> @Override <ide> public Boolean error(HttpResponse hr) { <del> return false; <add> if(hr.getCode() == HttpStatus.NOT_FOUND) { <add> // only 404 is a valid response, anything else is an error <add> // see http://docs.couchdb.org/en/latest/api/database/common.html#head--db <add> return false; <add> } <add> throw StdResponseHandler.createDbAccessException(hr); <ide> } <ide> @Override <ide> public Boolean success(HttpResponse hr) throws Exception { <del> return true; <add> return true; <ide> } <ide> }); <ide> }
Java
lgpl-2.1
302da773c9230dbbb31c2ab0856f0fefdc8b54fc
0
pferraro/wildfly,tomazzupan/wildfly,pferraro/wildfly,iweiss/wildfly,rhusar/wildfly,golovnin/wildfly,99sono/wildfly,99sono/wildfly,tadamski/wildfly,wildfly/wildfly,xasx/wildfly,tomazzupan/wildfly,jstourac/wildfly,xasx/wildfly,golovnin/wildfly,iweiss/wildfly,pferraro/wildfly,xasx/wildfly,99sono/wildfly,tadamski/wildfly,wildfly/wildfly,jstourac/wildfly,rhusar/wildfly,pferraro/wildfly,golovnin/wildfly,jstourac/wildfly,tomazzupan/wildfly,wildfly/wildfly,rhusar/wildfly,jstourac/wildfly,iweiss/wildfly,rhusar/wildfly,iweiss/wildfly,wildfly/wildfly,tadamski/wildfly
/* * JBoss, Home of Professional Open Source. * Copyright 2011, Red Hat, Inc., and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.jboss.as.server; import java.util.List; import java.util.concurrent.ExecutorService; import javax.xml.namespace.QName; import org.jboss.as.controller.extension.ExtensionRegistry; import org.jboss.as.controller.parsing.Namespace; import org.jboss.as.server.parsing.StandaloneXml; import org.jboss.as.controller.persistence.BackupXmlConfigurationPersister; import org.jboss.as.controller.persistence.ExtensibleConfigurationPersister; import org.jboss.modules.Module; import org.jboss.modules.ModuleLoader; import org.jboss.msc.service.ServiceActivator; import org.jboss.msc.service.ServiceContainer; import org.jboss.threads.AsyncFuture; /** * The application server bootstrap interface. Get a new instance via {@link Factory#newInstance()}. * * @author <a href="mailto:[email protected]">David M. Lloyd</a> * @author [email protected] */ public interface Bootstrap { /** * Bootstrap a new server instance. 
The list of updates should begin with extensions, followed by * subsystem adds, followed by deployments. The boot action of each update will be executed; this is * the only time that this will happen. This method will not block; the return value may be used to * wait for the result (with an optional timeout) or register an asynchronous callback. * * @param configuration the server configuration * @param extraServices additional services to start and stop with the server instance * @return the future service container */ AsyncFuture<ServiceContainer> bootstrap(Configuration configuration, List<ServiceActivator> extraServices); /** * Calls {@link #bootstrap(Configuration, List)} to bootstrap the container. The value for the returned future * becomes available when all installed services have been started/failed. * * @param configuration the server configuration * @param extraServices additional services to start and stop with the server instance * @return the future service container */ AsyncFuture<ServiceContainer> startup(Configuration configuration, List<ServiceActivator> extraServices); /** * The configuration for server bootstrap. */ final class Configuration { private final ServerEnvironment serverEnvironment; private final ExtensionRegistry extensionRegistry; private ModuleLoader moduleLoader = Module.getBootModuleLoader(); private ConfigurationPersisterFactory configurationPersisterFactory; private long startTime = Module.getStartTime(); public Configuration(final ServerEnvironment serverEnvironment) { assert serverEnvironment != null : "serverEnvironment is null"; this.serverEnvironment = serverEnvironment; this.extensionRegistry = new ExtensionRegistry(serverEnvironment.getLaunchType().getProcessType()); } /** * Set the port offset. * * @param portOffset the port offset */ public void setPortOffset(final int portOffset) { if (portOffset < 0) { throw new IllegalArgumentException("portOffset may not be less than 0"); } } /** * Get the server environment. 
* * @return the server environment. Will not be {@code null} */ public ServerEnvironment getServerEnvironment() { return serverEnvironment; } /** * Get the extension registry. * * @return the extension registry. Will not be {@code null} */ public ExtensionRegistry getExtensionRegistry() { return extensionRegistry; } /** * Get the application server module loader. * * @return the module loader */ public ModuleLoader getModuleLoader() { return moduleLoader; } /** * Set the application server module loader. * * @param moduleLoader the module loader */ public void setModuleLoader(final ModuleLoader moduleLoader) { this.moduleLoader = moduleLoader; } /** * Get the factory for the configuration persister to use. * * @return the configuration persister factory */ public synchronized ConfigurationPersisterFactory getConfigurationPersisterFactory() { if (configurationPersisterFactory == null) { // if (serverEnvironment == null) { // final ModuleLoader localModuleLoader = this.moduleLoader; // configurationPersisterFactory = new ConfigurationPersisterFactory() { // @Override // public ExtensibleConfigurationPersister createConfigurationPersister(ServerEnvironment serverEnvironment, ExecutorService executorService) { // return new NullConfigurationPersister(new StandaloneXml(localModuleLoader, executorService)); // } // }; // } // else { configurationPersisterFactory = new ConfigurationPersisterFactory() { @Override public ExtensibleConfigurationPersister createConfigurationPersister(ServerEnvironment serverEnvironment, ExecutorService executorService) { QName rootElement = new QName(Namespace.CURRENT.getUriString(), "server"); StandaloneXml parser = new StandaloneXml(Module.getBootModuleLoader(), executorService, extensionRegistry); BackupXmlConfigurationPersister persister = new BackupXmlConfigurationPersister(serverEnvironment.getServerConfigurationFile(), rootElement, parser, parser); persister.registerAdditionalRootElement(new QName(Namespace.DOMAIN_1_0.getUriString(), 
"server"), parser); extensionRegistry.setWriterRegistry(persister); return persister; } }; // } } return configurationPersisterFactory; } /** * Set the configuration persister factory to use. * * @param configurationPersisterFactory the configuration persister factory */ public synchronized void setConfigurationPersisterFactory(final ConfigurationPersisterFactory configurationPersisterFactory) { this.configurationPersisterFactory = configurationPersisterFactory; } /** * Get the server start time to report in the logs. * * @return the server start time */ public long getStartTime() { return startTime; } /** * Set the server start time to report in the logs. * * @param startTime the server start time */ public void setStartTime(final long startTime) { this.startTime = startTime; } } /** A factory for the {@link ExtensibleConfigurationPersister} to be used by this server */ interface ConfigurationPersisterFactory { /** * * @param serverEnvironment the server environment. Cannot be {@code null} * @param executorService an executor service the configuration persister can use. * May be {@code null} if asynchronous work is not supported * @return the configuration persister. Will not be {@code null} */ ExtensibleConfigurationPersister createConfigurationPersister(final ServerEnvironment serverEnvironment, final ExecutorService executorService); } /** * The factory for creating new instances of {@link org.jboss.as.server.Bootstrap}. */ final class Factory { private Factory() { } /** * Create a new instance. * * @return the new bootstrap instance */ public static Bootstrap newInstance() { return new BootstrapImpl(); } } }
server/src/main/java/org/jboss/as/server/Bootstrap.java
/* * JBoss, Home of Professional Open Source. * Copyright 2011, Red Hat, Inc., and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.jboss.as.server; import java.util.List; import java.util.concurrent.ExecutorService; import javax.xml.namespace.QName; import org.jboss.as.controller.extension.ExtensionRegistry; import org.jboss.as.controller.parsing.Namespace; import org.jboss.as.server.parsing.StandaloneXml; import org.jboss.as.controller.persistence.BackupXmlConfigurationPersister; import org.jboss.as.controller.persistence.ExtensibleConfigurationPersister; import org.jboss.modules.Module; import org.jboss.modules.ModuleLoader; import org.jboss.msc.service.ServiceActivator; import org.jboss.msc.service.ServiceContainer; import org.jboss.threads.AsyncFuture; /** * The application server bootstrap interface. Get a new instance via {@link Factory#newInstance()}. * * @author <a href="mailto:[email protected]">David M. Lloyd</a> * @author [email protected] */ public interface Bootstrap { /** * Bootstrap a new server instance. 
The list of updates should begin with extensions, followed by * subsystem adds, followed by deployments. The boot action of each update will be executed; this is * the only time that this will happen. This method will not block; the return value may be used to * wait for the result (with an optional timeout) or register an asynchronous callback. * * @param configuration the server configuration * @param extraServices additional services to start and stop with the server instance * @return the future service container */ AsyncFuture<ServiceContainer> bootstrap(Configuration configuration, List<ServiceActivator> extraServices); /** * Calls {@link #bootstrap(Configuration, List)} to bootstrap the container. The value for the returned future * becomes available when all installed services have been started/failed. * * @param configuration the server configuration * @param extraServices additional services to start and stop with the server instance * @return the future service container */ AsyncFuture<ServiceContainer> startup(Configuration configuration, List<ServiceActivator> extraServices); /** * The configuration for server bootstrap. */ final class Configuration { private final ServerEnvironment serverEnvironment; private final ExtensionRegistry extensionRegistry; private ModuleLoader moduleLoader = Module.getBootModuleLoader(); private ConfigurationPersisterFactory configurationPersisterFactory; private long startTime = Module.getStartTime(); public Configuration(final ServerEnvironment serverEnvironment) { assert serverEnvironment == null : "serverEnvironment is null"; this.serverEnvironment = serverEnvironment; this.extensionRegistry = new ExtensionRegistry(serverEnvironment.getLaunchType().getProcessType()); } /** * Set the port offset. * * @param portOffset the port offset */ public void setPortOffset(final int portOffset) { if (portOffset < 0) { throw new IllegalArgumentException("portOffset may not be less than 0"); } } /** * Get the server environment. 
* * @return the server environment. Will not be {@code null} */ public ServerEnvironment getServerEnvironment() { return serverEnvironment; } /** * Get the extension registry. * * @return the extension registry. Will not be {@code null} */ public ExtensionRegistry getExtensionRegistry() { return extensionRegistry; } /** * Get the application server module loader. * * @return the module loader */ public ModuleLoader getModuleLoader() { return moduleLoader; } /** * Set the application server module loader. * * @param moduleLoader the module loader */ public void setModuleLoader(final ModuleLoader moduleLoader) { this.moduleLoader = moduleLoader; } /** * Get the factory for the configuration persister to use. * * @return the configuration persister factory */ public synchronized ConfigurationPersisterFactory getConfigurationPersisterFactory() { if (configurationPersisterFactory == null) { // if (serverEnvironment == null) { // final ModuleLoader localModuleLoader = this.moduleLoader; // configurationPersisterFactory = new ConfigurationPersisterFactory() { // @Override // public ExtensibleConfigurationPersister createConfigurationPersister(ServerEnvironment serverEnvironment, ExecutorService executorService) { // return new NullConfigurationPersister(new StandaloneXml(localModuleLoader, executorService)); // } // }; // } // else { configurationPersisterFactory = new ConfigurationPersisterFactory() { @Override public ExtensibleConfigurationPersister createConfigurationPersister(ServerEnvironment serverEnvironment, ExecutorService executorService) { QName rootElement = new QName(Namespace.CURRENT.getUriString(), "server"); StandaloneXml parser = new StandaloneXml(Module.getBootModuleLoader(), executorService, extensionRegistry); BackupXmlConfigurationPersister persister = new BackupXmlConfigurationPersister(serverEnvironment.getServerConfigurationFile(), rootElement, parser, parser); persister.registerAdditionalRootElement(new QName(Namespace.DOMAIN_1_0.getUriString(), 
"server"), parser); extensionRegistry.setWriterRegistry(persister); return persister; } }; // } } return configurationPersisterFactory; } /** * Set the configuration persister factory to use. * * @param configurationPersisterFactory the configuration persister factory */ public synchronized void setConfigurationPersisterFactory(final ConfigurationPersisterFactory configurationPersisterFactory) { this.configurationPersisterFactory = configurationPersisterFactory; } /** * Get the server start time to report in the logs. * * @return the server start time */ public long getStartTime() { return startTime; } /** * Set the server start time to report in the logs. * * @param startTime the server start time */ public void setStartTime(final long startTime) { this.startTime = startTime; } } /** A factory for the {@link ExtensibleConfigurationPersister} to be used by this server */ interface ConfigurationPersisterFactory { /** * * @param serverEnvironment the server environment. Cannot be {@code null} * @param executorService an executor service the configuration persister can use. * May be {@code null} if asynchronous work is not supported * @return the configuration persister. Will not be {@code null} */ ExtensibleConfigurationPersister createConfigurationPersister(final ServerEnvironment serverEnvironment, final ExecutorService executorService); } /** * The factory for creating new instances of {@link org.jboss.as.server.Bootstrap}. */ final class Factory { private Factory() { } /** * Create a new instance. * * @return the new bootstrap instance */ public static Bootstrap newInstance() { return new BootstrapImpl(); } } }
Fix backwards assertion
server/src/main/java/org/jboss/as/server/Bootstrap.java
Fix backwards assertion
<ide><path>erver/src/main/java/org/jboss/as/server/Bootstrap.java <ide> private long startTime = Module.getStartTime(); <ide> <ide> public Configuration(final ServerEnvironment serverEnvironment) { <del> assert serverEnvironment == null : "serverEnvironment is null"; <add> assert serverEnvironment != null : "serverEnvironment is null"; <ide> this.serverEnvironment = serverEnvironment; <ide> this.extensionRegistry = new ExtensionRegistry(serverEnvironment.getLaunchType().getProcessType()); <ide> }
Java
apache-2.0
95923d1f42f82ab885798780ab2a7389b58f9f37
0
apache/tomcat,apache/tomcat,apache/tomcat,Nickname0806/Test_Q4,Nickname0806/Test_Q4,Nickname0806/Test_Q4,apache/tomcat,apache/tomcat,Nickname0806/Test_Q4
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.comet; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.Socket; import javax.net.SocketFactory; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.junit.Test; import org.apache.catalina.Context; import org.apache.catalina.Wrapper; import org.apache.catalina.comet.CometEvent.EventType; import org.apache.catalina.connector.CometEventImpl; import org.apache.catalina.connector.Request; import org.apache.catalina.connector.Response; import org.apache.catalina.startup.Tomcat; import org.apache.catalina.startup.TomcatBaseTest; import org.apache.catalina.valves.TesterAccessLogValve; import org.apache.catalina.valves.ValveBase; public class TestCometProcessor extends TomcatBaseTest { @Test public void testAsyncClose() throws Exception { if (!isCometSupported()) { log.info("This test is skipped, because this connector does not support 
Comet."); return; } // Setup Tomcat instance Tomcat tomcat = getTomcatInstance(); Context root = tomcat.addContext("", TEMP_DIR); Tomcat.addServlet(root, "comet", new SimpleCometServlet()); root.addServletMapping("/comet", "comet"); Tomcat.addServlet(root, "hello", new HelloWorldServlet()); root.addServletMapping("/hello", "hello"); root.getPipeline().addValve(new AsyncCometCloseValve()); tomcat.getConnector().setProperty("connectionTimeout", "5000"); tomcat.start(); // Create connection to Comet servlet final Socket socket = SocketFactory.getDefault().createSocket("localhost", getPort()); socket.setSoTimeout(5000); final OutputStream os = socket.getOutputStream(); String requestLine = "POST http://localhost:" + getPort() + "/comet HTTP/1.1\r\n"; os.write(requestLine.getBytes()); os.write("transfer-encoding: chunked\r\n".getBytes()); os.write("\r\n".getBytes()); InputStream is = socket.getInputStream(); ResponseReaderThread readThread = new ResponseReaderThread(is); readThread.start(); // Wait for the comet request/response to finish int count = 0; while (count < 10 && !readThread.getResponse().endsWith("0\r\n\r\n")) { Thread.sleep(500); count++; } if (count == 10) { fail("Comet request did not complete"); } // Send a standard HTTP request on the same connection requestLine = "GET http://localhost:" + getPort() + "/hello HTTP/1.1\r\n"; os.write(requestLine.getBytes()); os.write("\r\n".getBytes()); // Check for the expected response count = 0; while (count < 10 && !readThread.getResponse().contains( HelloWorldServlet.RESPONSE_TEXT)) { Thread.sleep(500); count++; } if (count == 10) { fail("Non-comet request did not complete"); } readThread.join(); os.close(); is.close(); } @Test public void testSimpleCometClient() throws Exception { doSimpleCometTest(null); } @Test public void testSimpleCometClientBeginFail() throws Exception { doSimpleCometTest(SimpleCometServlet.FAIL_ON_BEGIN); } @Test public void testSimpleCometClientReadFail() throws Exception { 
doSimpleCometTest(SimpleCometServlet.FAIL_ON_READ); } @Test public void testSimpleCometClientEndFail() throws Exception { doSimpleCometTest(SimpleCometServlet.FAIL_ON_END); } private void doSimpleCometTest(String initParam) throws Exception { if (!isCometSupported()) { log.info("This test is skipped, because this connector does not support Comet."); return; } // Setup Tomcat instance Tomcat tomcat = getTomcatInstance(); Context root = tomcat.addContext("", TEMP_DIR); Wrapper w = Tomcat.addServlet(root, "comet", new SimpleCometServlet()); if (initParam != null) { w.addInitParameter(initParam, "true"); } root.addServletMapping("/", "comet"); TesterAccessLogValve alv = new TesterAccessLogValve(); root.getPipeline().addValve(alv); tomcat.start(); // Create connection to Comet servlet final Socket socket = SocketFactory.getDefault().createSocket("localhost", getPort()); socket.setSoTimeout(60000); final OutputStream os = socket.getOutputStream(); String requestLine = "POST http://localhost:" + getPort() + "/ HTTP/1.1\r\n"; os.write(requestLine.getBytes()); os.write("transfer-encoding: chunked\r\n".getBytes()); os.write("\r\n".getBytes()); PingWriterThread writeThread = new PingWriterThread(4, os); writeThread.start(); socket.setSoTimeout(25000); InputStream is = socket.getInputStream(); ResponseReaderThread readThread = new ResponseReaderThread(is); readThread.start(); readThread.join(); os.close(); is.close(); String[] response = readThread.getResponse().split("\r\n"); if (initParam == null) { // Normal response expected // Validate response assertEquals("HTTP/1.1 200 OK", response[0]); assertEquals("Server: Apache-Coyote/1.1", response[1]); assertTrue(response[2].startsWith("Set-Cookie: JSESSIONID=")); assertEquals("Content-Type: text/plain;charset=ISO-8859-1", response[3]); assertEquals("Transfer-Encoding: chunked", response[4]); assertTrue(response[5].startsWith("Date: ")); assertEquals("", response[6]); assertEquals("7", response[7]); assertEquals("BEGIN", 
response[8]); assertEquals("", response[9]); assertEquals("17", response[10]); assertEquals("Client: READ: 4 bytes", response[11]); assertEquals("", response[12]); assertEquals("17", response[13]); assertEquals("Client: READ: 4 bytes", response[14]); assertEquals("", response[15]); assertEquals("17", response[16]); assertEquals("Client: READ: 4 bytes", response[17]); assertEquals("", response[18]); assertEquals("17", response[19]); assertEquals("Client: READ: 4 bytes", response[20]); assertEquals("", response[21]); assertEquals("d", response[22]); assertEquals("Client: END", response[23]); assertEquals("", response[24]); assertEquals("0", response[25]); // Expect 26 lines assertEquals(26, response.length); } else { // Failure expected only expected for the fail on begin // Failure at any later stage and the response headers (including // the 200 response code will already have been sent to the client if (SimpleCometServlet.FAIL_ON_BEGIN.equals(initParam)) { assertEquals("HTTP/1.1 500 Internal Server Error", response[0]); alv.validateAccessLog(1, 500, 0, 1000); } else { assertEquals("HTTP/1.1 200 OK", response[0]); alv.validateAccessLog(1, 200, 0, 5000); } } } /** * Tests if the Comet connection is closed if the Tomcat connector is * stopped. 
*/ @Test public void testCometConnectorStop() throws Exception { if (!isCometSupported()) { log.info("This test is skipped, because this connector does not support Comet."); return; } // Setup Tomcat instance SimpleCometServlet servlet = new SimpleCometServlet(); Tomcat tomcat = getTomcatInstance(); Context root = tomcat.addContext("", TEMP_DIR); Tomcat.addServlet(root, "comet", servlet); root.addServletMapping("/", "comet"); tomcat.start(); // Create connection to Comet servlet final Socket socket = SocketFactory.getDefault().createSocket("localhost", getPort()); socket.setSoTimeout(10000); final OutputStream os = socket.getOutputStream(); String requestLine = "POST http://localhost:" + getPort() + "/ HTTP/1.1\r\n"; os.write(requestLine.getBytes()); os.write("transfer-encoding: chunked\r\n".getBytes()); os.write("\r\n".getBytes()); PingWriterThread writeThread = new PingWriterThread(100, os); writeThread.start(); InputStream is = socket.getInputStream(); ResponseReaderThread readThread = new ResponseReaderThread(is); readThread.start(); // Allow the first couple of PING messages to be written Thread.sleep(3000); tomcat.getConnector().stop(); int count = 0; // Wait for the read thread to stop while (readThread.isAlive() && count < 50) { Thread.sleep(100); count ++; } // Wait for the write thread to stop count = 0; while (writeThread.isAlive() && count < 50) { Thread.sleep(100); count ++; } // Destroy the connector once the executor has sent the end event tomcat.getConnector().destroy(); String[] response = readThread.getResponse().split("\r\n"); String lastMessage = ""; String lastResponseLine = ""; for (int i = response.length; --i >= 0;) { lastMessage = response[i]; if (lastMessage.startsWith("Client:")) { break; } } for (int i = response.length; --i >= 0;) { lastResponseLine = response[i]; if (lastResponseLine.length() > 0) { break; } } StringBuilder status = new StringBuilder(); // Expected, but is not 100% reliable: // WriteThread exception: 
java.net.SocketException // ReaderThread exception: null // Last message: [Client: END] // Last response line: [0] (empty chunk) // Last comet event: [END] status.append("Status:"); status.append("\nWriterThread exception: " + writeThread.getException()); status.append("\nReaderThread exception: " + readThread.getException()); status.append("\nLast message: [" + lastMessage + "]"); status.append("\nLast response line: [" + lastResponseLine + "]"); status.append("\nLast comet event: [" + servlet.getLastEvent() + "]"); if (writeThread.getException() == null || !lastMessage.contains("Client: END") || !EventType.END.equals(servlet.getLastEvent())) { log.error(status); } else { log.info(status); } assertTrue("Comet END event not received", EventType.END.equals(servlet.getLastEvent())); } private boolean isCometSupported() { String protocol = getTomcatInstance().getConnector().getProtocolHandlerClassName(); if (protocol.indexOf("Nio") == -1 && protocol.indexOf("Apr") == -1) { return false; } else { return true; } } private static class SimpleCometServlet extends HttpServlet implements CometProcessor { private static final long serialVersionUID = 1L; public static final String FAIL_ON_BEGIN = "failOnBegin"; public static final String FAIL_ON_READ = "failOnRead"; public static final String FAIL_ON_END = "failOnEnd"; private boolean failOnBegin = false; private boolean failOnRead = false; private boolean failOnEnd = false; private volatile EventType lastEvent; public EventType getLastEvent() { return lastEvent; } @Override public void init() throws ServletException { failOnBegin = Boolean.valueOf(getServletConfig().getInitParameter( FAIL_ON_BEGIN)).booleanValue(); failOnRead = Boolean.valueOf(getServletConfig().getInitParameter( FAIL_ON_READ)).booleanValue(); failOnEnd = Boolean.valueOf(getServletConfig().getInitParameter( FAIL_ON_END)).booleanValue(); } @Override public void event(CometEvent event) throws IOException, ServletException { HttpServletRequest request = 
event.getHttpServletRequest(); HttpServletResponse response = event.getHttpServletResponse(); HttpSession session = request.getSession(true); session.setMaxInactiveInterval(30); lastEvent = event.getEventType(); if (event.getEventType() == EventType.BEGIN) { if (failOnBegin) { throw new IOException("Fail on begin"); } response.setContentType("text/plain"); response.getWriter().print("BEGIN" + "\r\n"); } else if (event.getEventType() == EventType.READ) { if (failOnRead) { throw new IOException("Fail on read"); } InputStream is = request.getInputStream(); int count = 0; while (is.available() > 0) { is.read(); count ++; } String msg = "READ: " + count + " bytes"; response.getWriter().print("Client: " + msg + "\r\n"); } else if (event.getEventType() == EventType.END) { if (failOnEnd) { throw new IOException("Fail on end"); } String msg = "END"; response.getWriter().print("Client: " + msg + "\r\n"); event.close(); } else { response.getWriter().print(event.getEventSubType() + "\r\n"); event.close(); } response.getWriter().flush(); } } private static class PingWriterThread extends Thread { private final int pingCount; private final OutputStream os; private volatile Exception e = null; public PingWriterThread(int pingCount, OutputStream os) { this.pingCount = pingCount; this.os = os; } public Exception getException() { return e; } @Override public void run() { try { for (int i = 0; i < pingCount; i++) { os.write("4\r\n".getBytes()); os.write("PING\r\n".getBytes()); os.flush(); Thread.sleep(1000); } os.write("0\r\n".getBytes()); os.write("\r\n".getBytes()); } catch (Exception e) { this.e = e; } } } private static class ResponseReaderThread extends Thread { private final InputStream is; private final StringBuilder response = new StringBuilder(); private volatile Exception e = null; public ResponseReaderThread(InputStream is) { this.is = is; } public Exception getException() { return e; } public String getResponse() { return response.toString(); } @Override public void run() 
{ try { int c = is.read(); while (c > -1) { response.append((char) c); c = is.read(); } } catch (Exception e) { this.e = e; } } } private static class AsyncCometCloseValve extends ValveBase { @Override public void invoke(Request request, Response response) throws IOException, ServletException { CometEventImpl event = new CometEventImpl(request, response); getNext().invoke(request, response); if (request.isComet()) { Thread t = new AsyncCometCloseThread(event); t.start(); } } } private static class AsyncCometCloseThread extends Thread { private final CometEvent event; public AsyncCometCloseThread(CometEvent event) { this.event = event; } @Override public void run() { try { Thread.sleep(2000); event.close(); } catch (Exception e) { // Test should fail. Report what went wrong. e.printStackTrace(); } } } }
test/org/apache/catalina/comet/TestCometProcessor.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.comet; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.Socket; import javax.net.SocketFactory; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.junit.Test; import org.apache.catalina.Context; import org.apache.catalina.Wrapper; import org.apache.catalina.comet.CometEvent.EventType; import org.apache.catalina.connector.CometEventImpl; import org.apache.catalina.connector.Request; import org.apache.catalina.connector.Response; import org.apache.catalina.startup.Tomcat; import org.apache.catalina.startup.TomcatBaseTest; import org.apache.catalina.valves.TesterAccessLogValve; import org.apache.catalina.valves.ValveBase; public class TestCometProcessor extends TomcatBaseTest { @Test public void testAsyncClose() throws Exception { if (!isCometSupported()) { return; } // Setup Tomcat 
instance Tomcat tomcat = getTomcatInstance(); Context root = tomcat.addContext("", TEMP_DIR); Tomcat.addServlet(root, "comet", new SimpleCometServlet()); root.addServletMapping("/comet", "comet"); Tomcat.addServlet(root, "hello", new HelloWorldServlet()); root.addServletMapping("/hello", "hello"); root.getPipeline().addValve(new AsyncCometCloseValve()); tomcat.getConnector().setProperty("connectionTimeout", "5000"); tomcat.start(); // Create connection to Comet servlet final Socket socket = SocketFactory.getDefault().createSocket("localhost", getPort()); socket.setSoTimeout(5000); final OutputStream os = socket.getOutputStream(); String requestLine = "POST http://localhost:" + getPort() + "/comet HTTP/1.1\r\n"; os.write(requestLine.getBytes()); os.write("transfer-encoding: chunked\r\n".getBytes()); os.write("\r\n".getBytes()); InputStream is = socket.getInputStream(); ResponseReaderThread readThread = new ResponseReaderThread(is); readThread.start(); // Wait for the comet request/response to finish int count = 0; while (count < 10 && !readThread.getResponse().endsWith("0\r\n\r\n")) { Thread.sleep(500); count++; } if (count == 10) { fail("Comet request did not complete"); } // Send a standard HTTP request on the same connection requestLine = "GET http://localhost:" + getPort() + "/hello HTTP/1.1\r\n"; os.write(requestLine.getBytes()); os.write("\r\n".getBytes()); // Check for the expected response count = 0; while (count < 10 && !readThread.getResponse().contains( HelloWorldServlet.RESPONSE_TEXT)) { Thread.sleep(500); count++; } if (count == 10) { fail("Non-comet request did not complete"); } readThread.join(); os.close(); is.close(); } @Test public void testSimpleCometClient() throws Exception { doSimpleCometTest(null); } @Test public void testSimpleCometClientBeginFail() throws Exception { doSimpleCometTest(SimpleCometServlet.FAIL_ON_BEGIN); } @Test public void testSimpleCometClientReadFail() throws Exception { doSimpleCometTest(SimpleCometServlet.FAIL_ON_READ); } 
@Test public void testSimpleCometClientEndFail() throws Exception { doSimpleCometTest(SimpleCometServlet.FAIL_ON_END); } private void doSimpleCometTest(String initParam) throws Exception { if (!isCometSupported()) { return; } // Setup Tomcat instance Tomcat tomcat = getTomcatInstance(); Context root = tomcat.addContext("", TEMP_DIR); Wrapper w = Tomcat.addServlet(root, "comet", new SimpleCometServlet()); if (initParam != null) { w.addInitParameter(initParam, "true"); } root.addServletMapping("/", "comet"); TesterAccessLogValve alv = new TesterAccessLogValve(); root.getPipeline().addValve(alv); tomcat.start(); // Create connection to Comet servlet final Socket socket = SocketFactory.getDefault().createSocket("localhost", getPort()); socket.setSoTimeout(60000); final OutputStream os = socket.getOutputStream(); String requestLine = "POST http://localhost:" + getPort() + "/ HTTP/1.1\r\n"; os.write(requestLine.getBytes()); os.write("transfer-encoding: chunked\r\n".getBytes()); os.write("\r\n".getBytes()); PingWriterThread writeThread = new PingWriterThread(4, os); writeThread.start(); socket.setSoTimeout(25000); InputStream is = socket.getInputStream(); ResponseReaderThread readThread = new ResponseReaderThread(is); readThread.start(); readThread.join(); os.close(); is.close(); String[] response = readThread.getResponse().split("\r\n"); if (initParam == null) { // Normal response expected // Validate response assertEquals("HTTP/1.1 200 OK", response[0]); assertEquals("Server: Apache-Coyote/1.1", response[1]); assertTrue(response[2].startsWith("Set-Cookie: JSESSIONID=")); assertEquals("Content-Type: text/plain;charset=ISO-8859-1", response[3]); assertEquals("Transfer-Encoding: chunked", response[4]); assertTrue(response[5].startsWith("Date: ")); assertEquals("", response[6]); assertEquals("7", response[7]); assertEquals("BEGIN", response[8]); assertEquals("", response[9]); assertEquals("17", response[10]); assertEquals("Client: READ: 4 bytes", response[11]); 
assertEquals("", response[12]); assertEquals("17", response[13]); assertEquals("Client: READ: 4 bytes", response[14]); assertEquals("", response[15]); assertEquals("17", response[16]); assertEquals("Client: READ: 4 bytes", response[17]); assertEquals("", response[18]); assertEquals("17", response[19]); assertEquals("Client: READ: 4 bytes", response[20]); assertEquals("", response[21]); assertEquals("d", response[22]); assertEquals("Client: END", response[23]); assertEquals("", response[24]); assertEquals("0", response[25]); // Expect 26 lines assertEquals(26, response.length); } else { // Failure expected only expected for the fail on begin // Failure at any later stage and the response headers (including // the 200 response code will already have been sent to the client if (SimpleCometServlet.FAIL_ON_BEGIN.equals(initParam)) { assertEquals("HTTP/1.1 500 Internal Server Error", response[0]); alv.validateAccessLog(1, 500, 0, 1000); } else { assertEquals("HTTP/1.1 200 OK", response[0]); alv.validateAccessLog(1, 200, 0, 5000); } } } /** * Tests if the Comet connection is closed if the Tomcat connector is * stopped. 
*/ @Test public void testCometConnectorStop() throws Exception { if (!isCometSupported()) { return; } // Setup Tomcat instance Tomcat tomcat = getTomcatInstance(); Context root = tomcat.addContext("", TEMP_DIR); Tomcat.addServlet(root, "comet", new SimpleCometServlet()); root.addServletMapping("/", "comet"); tomcat.start(); // Create connection to Comet servlet final Socket socket = SocketFactory.getDefault().createSocket("localhost", getPort()); socket.setSoTimeout(10000); final OutputStream os = socket.getOutputStream(); String requestLine = "POST http://localhost:" + getPort() + "/ HTTP/1.1\r\n"; os.write(requestLine.getBytes()); os.write("transfer-encoding: chunked\r\n".getBytes()); os.write("\r\n".getBytes()); PingWriterThread writeThread = new PingWriterThread(100, os); writeThread.start(); InputStream is = socket.getInputStream(); ResponseReaderThread readThread = new ResponseReaderThread(is); readThread.start(); // Allow the first couple of PING messages to be written Thread.sleep(3000); tomcat.getConnector().stop(); // Wait for the write thread to stop int count = 0; while (writeThread.isAlive() && count < 50) { Thread.sleep(100); count ++; } // Wait for the read thread to stop while (readThread.isAlive() && count < 50) { Thread.sleep(100); count ++; } // Destroy the connector once the executor has sent the end event tomcat.getConnector().destroy(); // Write should trigger an exception once the connector stops since the // socket should be closed assertNotNull("No exception in writing thread", writeThread.getException()); // Termination of Read thread varies by platform and protocol // In all cases, the END event should be sent. 
assertTrue("Comet END event not received", readThread.getResponse().contains("Client: END")); } private boolean isCometSupported() { String protocol = getTomcatInstance().getConnector().getProtocolHandlerClassName(); if (protocol.indexOf("Nio") == -1 && protocol.indexOf("Apr") == -1) { return false; } else { return true; } } private static class SimpleCometServlet extends HttpServlet implements CometProcessor { private static final long serialVersionUID = 1L; public static final String FAIL_ON_BEGIN = "failOnBegin"; public static final String FAIL_ON_READ = "failOnRead"; public static final String FAIL_ON_END = "failOnEnd"; private boolean failOnBegin = false; private boolean failOnRead = false; private boolean failOnEnd = false; @Override public void init() throws ServletException { failOnBegin = Boolean.valueOf(getServletConfig().getInitParameter( FAIL_ON_BEGIN)).booleanValue(); failOnRead = Boolean.valueOf(getServletConfig().getInitParameter( FAIL_ON_READ)).booleanValue(); failOnEnd = Boolean.valueOf(getServletConfig().getInitParameter( FAIL_ON_END)).booleanValue(); } @Override public void event(CometEvent event) throws IOException, ServletException { HttpServletRequest request = event.getHttpServletRequest(); HttpServletResponse response = event.getHttpServletResponse(); HttpSession session = request.getSession(true); session.setMaxInactiveInterval(30); if (event.getEventType() == EventType.BEGIN) { if (failOnBegin) { throw new IOException("Fail on begin"); } response.setContentType("text/plain"); response.getWriter().print("BEGIN" + "\r\n"); } else if (event.getEventType() == EventType.READ) { if (failOnRead) { throw new IOException("Fail on read"); } InputStream is = request.getInputStream(); int count = 0; while (is.available() > 0) { is.read(); count ++; } String msg = "READ: " + count + " bytes"; response.getWriter().print("Client: " + msg + "\r\n"); } else if (event.getEventType() == EventType.END) { if (failOnEnd) { throw new IOException("Fail on end"); 
} String msg = "END"; response.getWriter().print("Client: " + msg + "\r\n"); event.close(); } else { response.getWriter().print(event.getEventSubType() + "\r\n"); event.close(); } response.getWriter().flush(); } } private static class PingWriterThread extends Thread { private final int pingCount; private final OutputStream os; private volatile Exception e = null; public PingWriterThread(int pingCount, OutputStream os) { this.pingCount = pingCount; this.os = os; } public Exception getException() { return e; } @Override public void run() { try { for (int i = 0; i < pingCount; i++) { os.write("4\r\n".getBytes()); os.write("PING\r\n".getBytes()); os.flush(); Thread.sleep(1000); } os.write("0\r\n".getBytes()); os.write("\r\n".getBytes()); } catch (Exception e) { this.e = e; } } } private static class ResponseReaderThread extends Thread { private final InputStream is; private final StringBuilder response = new StringBuilder(); public ResponseReaderThread(InputStream is) { this.is = is; } public String getResponse() { return response.toString(); } @Override public void run() { try { int c = is.read(); while (c > -1) { response.append((char) c); c = is.read(); } } catch (Exception e) { // Ignore } } } private static class AsyncCometCloseValve extends ValveBase { @Override public void invoke(Request request, Response response) throws IOException, ServletException { CometEventImpl event = new CometEventImpl(request, response); getNext().invoke(request, response); if (request.isComet()) { Thread t = new AsyncCometCloseThread(event); t.start(); } } } private static class AsyncCometCloseThread extends Thread { private final CometEvent event; public AsyncCometCloseThread(CometEvent event) { this.event = event; } @Override public void run() { try { Thread.sleep(2000); event.close(); } catch (Exception e) { // Test should fail. Report what went wrong. e.printStackTrace(); } } } }
Change TestCometProcessor#testCometConnectorStop() to test whether END event was processed by servlet and write other information as a log message. The old test was not 100% reliable and fails for me with APR. It needs more investigation. git-svn-id: 79cef5a5a257cc9dbe40a45ac190115b4780e2d0@1228918 13f79535-47bb-0310-9956-ffa450edef68
test/org/apache/catalina/comet/TestCometProcessor.java
Change TestCometProcessor#testCometConnectorStop() to test whether END event was processed by servlet and write other information as a log message. The old test was not 100% reliable and fails for me with APR. It needs more investigation.
<ide><path>est/org/apache/catalina/comet/TestCometProcessor.java <ide> import javax.servlet.http.HttpSession; <ide> <ide> import static org.junit.Assert.assertEquals; <del>import static org.junit.Assert.assertNotNull; <ide> import static org.junit.Assert.assertTrue; <ide> import static org.junit.Assert.fail; <ide> <ide> public void testAsyncClose() throws Exception { <ide> <ide> if (!isCometSupported()) { <add> log.info("This test is skipped, because this connector does not support Comet."); <ide> return; <ide> } <ide> <ide> <ide> private void doSimpleCometTest(String initParam) throws Exception { <ide> if (!isCometSupported()) { <add> log.info("This test is skipped, because this connector does not support Comet."); <ide> return; <ide> } <ide> <ide> public void testCometConnectorStop() throws Exception { <ide> <ide> if (!isCometSupported()) { <add> log.info("This test is skipped, because this connector does not support Comet."); <ide> return; <ide> } <ide> <ide> // Setup Tomcat instance <add> SimpleCometServlet servlet = new SimpleCometServlet(); <ide> Tomcat tomcat = getTomcatInstance(); <ide> Context root = tomcat.addContext("", TEMP_DIR); <del> Tomcat.addServlet(root, "comet", new SimpleCometServlet()); <add> Tomcat.addServlet(root, "comet", servlet); <ide> root.addServletMapping("/", "comet"); <ide> tomcat.start(); <ide> <ide> <ide> tomcat.getConnector().stop(); <ide> <del> // Wait for the write thread to stop <ide> int count = 0; <del> while (writeThread.isAlive() && count < 50) { <del> Thread.sleep(100); <del> count ++; <del> } <del> <ide> // Wait for the read thread to stop <ide> while (readThread.isAlive() && count < 50) { <ide> Thread.sleep(100); <ide> count ++; <ide> } <ide> <add> // Wait for the write thread to stop <add> count = 0; <add> while (writeThread.isAlive() && count < 50) { <add> Thread.sleep(100); <add> count ++; <add> } <add> <ide> // Destroy the connector once the executor has sent the end event <ide> tomcat.getConnector().destroy(); <ide> 
<del> // Write should trigger an exception once the connector stops since the <del> // socket should be closed <del> assertNotNull("No exception in writing thread", <del> writeThread.getException()); <del> <del> // Termination of Read thread varies by platform and protocol <del> // In all cases, the END event should be sent. <add> String[] response = readThread.getResponse().split("\r\n"); <add> String lastMessage = ""; <add> String lastResponseLine = ""; <add> for (int i = response.length; --i >= 0;) { <add> lastMessage = response[i]; <add> if (lastMessage.startsWith("Client:")) { <add> break; <add> } <add> } <add> for (int i = response.length; --i >= 0;) { <add> lastResponseLine = response[i]; <add> if (lastResponseLine.length() > 0) { <add> break; <add> } <add> } <add> StringBuilder status = new StringBuilder(); <add> // Expected, but is not 100% reliable: <add> // WriteThread exception: java.net.SocketException <add> // ReaderThread exception: null <add> // Last message: [Client: END] <add> // Last response line: [0] (empty chunk) <add> // Last comet event: [END] <add> status.append("Status:"); <add> status.append("\nWriterThread exception: " + writeThread.getException()); <add> status.append("\nReaderThread exception: " + readThread.getException()); <add> status.append("\nLast message: [" + lastMessage + "]"); <add> status.append("\nLast response line: [" + lastResponseLine + "]"); <add> status.append("\nLast comet event: [" + servlet.getLastEvent() + "]"); <add> if (writeThread.getException() == null <add> || !lastMessage.contains("Client: END") <add> || !EventType.END.equals(servlet.getLastEvent())) { <add> log.error(status); <add> } else { <add> log.info(status); <add> } <ide> assertTrue("Comet END event not received", <del> readThread.getResponse().contains("Client: END")); <add> EventType.END.equals(servlet.getLastEvent())); <ide> } <ide> <ide> private boolean isCometSupported() { <ide> private boolean failOnRead = false; <ide> private boolean failOnEnd = 
false; <ide> <add> private volatile EventType lastEvent; <add> <add> public EventType getLastEvent() { <add> return lastEvent; <add> } <ide> <ide> @Override <ide> public void init() throws ServletException { <ide> <ide> HttpSession session = request.getSession(true); <ide> session.setMaxInactiveInterval(30); <add> <add> lastEvent = event.getEventType(); <ide> <ide> if (event.getEventType() == EventType.BEGIN) { <ide> if (failOnBegin) { <ide> private final InputStream is; <ide> private final StringBuilder response = new StringBuilder(); <ide> <add> private volatile Exception e = null; <add> <ide> public ResponseReaderThread(InputStream is) { <ide> this.is = is; <add> } <add> <add> public Exception getException() { <add> return e; <ide> } <ide> <ide> public String getResponse() { <ide> c = is.read(); <ide> } <ide> } catch (Exception e) { <del> // Ignore <add> this.e = e; <ide> } <ide> } <ide> }
JavaScript
mit
281087c5d037a984b066848cea23d4be5ec455b8
0
danethurber/webpack-manifest-plugin,SeeThruHead/webpack-manifest-plugin
var path = require('path'); var _ = require('lodash'); function ManifestPlugin(opts) { this.opts = _.assign({ basePath: '', fileName: 'manifest.json', stripSrc: null, transformExtensions: /^(gz|map)$/i, imageExtensions: /^(jpe?g|png|gif|svg)(\.|$)/i, readFile: false }, opts || {}); } ManifestPlugin.prototype.getFileType = function(str) { str = str.replace(/\?.*/, ''); var split = str.split('.'); var ext = split.pop(); if (this.opts.transformExtensions.test(ext)) { ext = split.pop() + '.' + ext; } return ext; }; ManifestPlugin.prototype.apply = function(compiler) { var outputName = this.opts.fileName; compiler.plugin('emit', function(compilation, compileCallback){ var stats = compilation.getStats().toJson(); var assetsByChunkName = stats.assetsByChunkName; var manifest; if (this.opts.readFile && fs.existsSync(outputPath)) { manifest = JSON.parse(fs.readFileSync(outputPath).toString()); } else { manifest = {}; } _.merge(manifest, Object.keys(assetsByChunkName).reduce(function(reducedObj, srcName){ var chunkName = assetsByChunkName[srcName]; srcName = srcName.replace(this.opts.stripSrc, ''); if(Array.isArray(chunkName)) { var tmp = chunkName.reduce(function(prev, item){ prev[srcName + '.' + this.getFileType(item)] = item; return prev; }.bind(this), {}); return _.merge(reducedObj, tmp); } else { reducedObj[srcName + '.' + this.getFileType(chunkName)] = chunkName; return reducedObj; } }.bind(this), {})); // images don't show up in assetsByChunkName. // we're getting them this way; _.merge(manifest, stats.assets.reduce(function(prevObj, asset){ var ext = this.getFileType(asset.name); if (this.opts.imageExtensions.test(ext)) { var trimmedName = asset.name.split('.').shift(); prevObj[trimmedName + '.' + ext] = asset.name; } return prevObj; }.bind(this), {})); // Append optional basepath onto all references. // This allows output path to be reflected in the manifest. 
if (this.opts.basePath) { manifest = _.reduce(manifest, function(memo, value, key) { memo[this.opts.basePath + key] = this.opts.basePath + value; return memo; }.bind(this), {}); } var json = JSON.stringify(manifest, null, 2); compilation.assets[outputName] = { source: function() { return json; }, size: function() { return json.length; } }; compileCallback() }.bind(this)); }; module.exports = ManifestPlugin;
lib/plugin.js
var _ = require('lodash'); function ManifestPlugin(opts) { this.opts = _.assign({ basePath: '', fileName: 'manifest.json', stripSrc: null, transformExtensions: /^(gz|map)$/i, imageExtensions: /^(jpe?g|png|gif|svg)(\.|$)/i, readFile: false }, opts || {}); } ManifestPlugin.prototype.getFileType = function(str) { str = str.replace(/\?.*/, ''); var split = str.split('.'); var ext = split.pop(); if (this.opts.transformExtensions.test(ext)) { ext = split.pop() + '.' + ext; } return ext; }; ManifestPlugin.prototype.apply = function(compiler) { var outputName = this.opts.fileName; compiler.plugin('emit', function(compilation, compileCallback){ var stats = compilation.getStats().toJson(); var assetsByChunkName = stats.assetsByChunkName; var manifest; if (this.opts.readFile && fs.existsSync(outputPath)) { manifest = JSON.parse(fs.readFileSync(outputPath).toString()); } else { manifest = {}; } _.merge(manifest, Object.keys(assetsByChunkName).reduce(function(reducedObj, srcName){ var chunkName = assetsByChunkName[srcName]; srcName = srcName.replace(this.opts.stripSrc, ''); if(Array.isArray(chunkName)) { var tmp = chunkName.reduce(function(prev, item){ prev[srcName + '.' + this.getFileType(item)] = item; return prev; }.bind(this), {}); return _.merge(reducedObj, tmp); } else { reducedObj[srcName + '.' + this.getFileType(chunkName)] = chunkName; return reducedObj; } }.bind(this), {})); // images don't show up in assetsByChunkName. // we're getting them this way; _.merge(manifest, stats.assets.reduce(function(prevObj, asset){ var ext = this.getFileType(asset.name); if (this.opts.imageExtensions.test(ext)) { var trimmedName = asset.name.split('.').shift(); prevObj[trimmedName + '.' + ext] = asset.name; } return prevObj; }.bind(this), {})); // Append optional basepath onto all references. // This allows output path to be reflected in the manifest. 
if (this.opts.basePath) { manifest = _.reduce(manifest, function(memo, value, key) { memo[this.opts.basePath + key] = this.opts.basePath + value; return memo; }.bind(this), {}); } var json = JSON.stringify(manifest, null, 2); compilation.assets[outputName] = { source: function() { return json; }, size: function() { return json.length; } }; compileCallback() }.bind(this)); }; module.exports = ManifestPlugin;
require('path')
lib/plugin.js
require('path')
<ide><path>ib/plugin.js <add>var path = require('path'); <ide> var _ = require('lodash'); <ide> <ide> function ManifestPlugin(opts) {
JavaScript
mit
9d3c81075fd41b6b78c04ad950ac10e91a993943
0
akabekobeko/examples-electron,akabekobeko/examples-electron,akabekobeko/examples-electron
import Path from 'path'; import BrowserWindow from 'browser-window'; import Util from '../common/Util.js'; import { IPCKeys } from '../common/Constants.js'; /** * Manage the window. */ export default class WindowManager { /** * Initialize instance. * * @param {Main} context Application context. */ constructor( context ) { /** * Application context. * @type {Main} */ this._context = context; /** * Collection of a managed window. * @type {Map.<String, BrowserWindow>} */ this._windows = new Map(); /** * About dialog. * @type {BrowserWindow} */ this._aboutDialog = null; context.ipc.on( IPCKeys.RequestCreateNewWindow, this._onRequestCreateNewWindow.bind( this ) ); context.ipc.on( IPCKeys.RequestSendMessage, this._onRequestSendMessage.bind( this ) ); context.ipc.on( IPCKeys.RequestGetWindowIDs, this._onRequestGetWindowIDs.bind( this ) ); } /** * Reload the focused window, For debug. */ reload() { const w = BrowserWindow.getFocusedWindow(); if( w ) { w.reload(); } } /** * Switch the display of the developer tools window at focused window, For debug. */ toggleDevTools() { const w = BrowserWindow.getFocusedWindow(); if( w ) { w.toggleDevTools(); } } /** * Create a new window. * * @return {BrowserWindow} Created window. */ createNewWindow() { const w = new BrowserWindow( { width: 400, height: 400, minWidth: 400, minHeight: 400, resizable: true } ); const id = w.id; w.on( 'closed', () => { if( DEBUG ) { Util.log( 'Window was closed, id = ' + id ); } // Unregister this._windows.delete( id ); this._notifyUpdateWindowIDs( id ); if( this._windows.size === 0 && this._aboutDialog ) { this._aboutDialog.close(); } } ); const filePath = Path.join( __dirname, 'window-main.html' ); w.loadURL( 'file://' + filePath + '#' + w.id ); this._windows.set( id, w ); return w; } /** * Show the about application window. 
*/ createAboutWindow() { if( this._aboutDialog ) { return; } const w = new BrowserWindow( { width: 400, heigh: 256, resizable: false, alwaysOnTop: true } ); w.setMenu( null ); w.on( 'closed', () => { if( DEBUG ) { Util.log( 'The about application window was closed.' ); } this._aboutDialog = null; } ); const filePath = Path.join( __dirname, 'window-about.html' ); w.loadURL( 'file://' + filePath ); this._aboutDialog = w; } /** * Notify that the window ID list has been updated. * * @param {Number} excludeID Exclude ID. */ _notifyUpdateWindowIDs( excludeID ) { const windowIDs = []; for( let key of this._windows.keys() ) { windowIDs.push( key ); } this._windows.forEach( ( w ) => { if( w.id === excludeID ) { return; } w.webContents.send( IPCKeys.UpdateWindowIDs, windowIDs ); } ); } /** * Occurs when a show new window requested. * * @param {IPCEvent} ev Event data. */ _onRequestCreateNewWindow( ev ) { const createdWindow = this.createNewWindow(); ev.sender.send( IPCKeys.FinishCreateNewWindow ); this._notifyUpdateWindowIDs( createdWindow.id ); } /** * Occurs when a send message requested. * * @param {IPCEvent} ev Event data. * @param {Number} id Target window's identifier. * @param {String} message Message. */ _onRequestSendMessage( ev, id, message ) { const w = this._windows.get( id ); if( w ) { w.webContents.send( IPCKeys.UpdateMessage, message ); } ev.sender.send( IPCKeys.FinishSendMessage ); } /** * Occurs when a get window identifiers requested. * * @param {IPCEvent} ev Event data. */ _onRequestGetWindowIDs( ev ) { const windowIDs = Array.from( this._windows.keys() ); ev.sender.send( IPCKeys.FinishGetWindowIDs, windowIDs ); } }
multiple-windows/src/js/main/WindowManager.js
import Path from 'path'; import BrowserWindow from 'browser-window'; import Util from '../common/Util.js'; import { IPCKeys } from '../common/Constants.js'; /** * Manage the window. */ export default class WindowManager { /** * Initialize instance. * * @param {Main} context Application context. */ constructor( context ) { /** * Application context. * @type {Main} */ this._context = context; /** * Collection of a managed window. * @type {Map.<String, BrowserWindow>} */ this._windows = new Map(); /** * About dialog. * @type {BrowserWindow} */ this._aboutDialog = null; context.ipc.on( IPCKeys.RequestCreateNewWindow, this._onRequestCreateNewWindow.bind( this ) ); context.ipc.on( IPCKeys.RequestSendMessage, this._onRequestSendMessage.bind( this ) ); context.ipc.on( IPCKeys.RequestGetWindowIDs, this._onRequestGetWindowIDs.bind( this ) ); } /** * Reload the focused window, For debug. */ reload() { const w = BrowserWindow.getFocusedWindow(); if( w ) { w.reload(); } } /** * Switch the display of the developer tools window at focused window, For debug. */ toggleDevTools() { const w = BrowserWindow.getFocusedWindow(); if( w ) { w.toggleDevTools(); } } /** * Create a new window. * * @return {BrowserWindow} Created window. */ createNewWindow() { const w = new BrowserWindow( { width: 400, height: 400, minWidth: 400, minHeight: 400, resizable: true } ); const id = w.id; w.on( 'closed', () => { if( DEBUG ) { Util.log( 'Window was closed, id = ' + id ); } // Unregister this._windows.delete( id ); if( this._windows.size === 0 && this._aboutDialog ) { this._aboutDialog.close(); } } ); const filePath = Path.join( __dirname, 'window-main.html' ); w.loadURL( 'file://' + filePath + '#' + w.id ); this._windows.set( id, w ); return w; } /** * Show the about application window. 
*/ createAboutWindow() { if( this._aboutDialog ) { return; } const w = new BrowserWindow( { width: 400, heigh: 256, resizable: false, alwaysOnTop: true } ); w.setMenu( null ); w.on( 'closed', () => { if( DEBUG ) { Util.log( 'The about application window was closed.' ); } this._aboutDialog = null; } ); const filePath = Path.join( __dirname, 'window-about.html' ); w.loadURL( 'file://' + filePath ); this._aboutDialog = w; } /** * Occurs when a show new window requested. * * @param {IPCEvent} ev Event data. */ _onRequestCreateNewWindow( ev ) { const createdWindow = this.createNewWindow(); ev.sender.send( IPCKeys.FinishCreateNewWindow ); const windowIDs = []; for( let key of this._windows.keys() ) { windowIDs.push( key ); } this._windows.forEach( ( w ) => { // Because it may not receive the message, explicit request ( RequestGetWindowIDs ) later if( w.id === createdWindow.id ) { return; } w.webContents.send( IPCKeys.UpdateWindowIDs, windowIDs ); } ); } /** * Occurs when a send message requested. * * @param {IPCEvent} ev Event data. * @param {Number} id Target window's identifier. * @param {String} message Message. */ _onRequestSendMessage( ev, id, message ) { const w = this._windows.get( id ); if( w ) { w.webContents.send( IPCKeys.UpdateMessage, message ); } ev.sender.send( IPCKeys.FinishSendMessage ); } /** * Occurs when a get window identifiers requested. * * @param {IPCEvent} ev Event data. */ _onRequestGetWindowIDs( ev ) { const windowIDs = Array.from( this._windows.keys() ); ev.sender.send( IPCKeys.FinishGetWindowIDs, windowIDs ); } }
#66 Again commit because fixes were missing
multiple-windows/src/js/main/WindowManager.js
#66 Again commit because fixes were missing
<ide><path>ultiple-windows/src/js/main/WindowManager.js <ide> <ide> // Unregister <ide> this._windows.delete( id ); <add> this._notifyUpdateWindowIDs( id ); <ide> <ide> if( this._windows.size === 0 && this._aboutDialog ) { <ide> this._aboutDialog.close(); <ide> } <ide> <ide> /** <add> * Notify that the window ID list has been updated. <add> * <add> * @param {Number} excludeID Exclude ID. <add> */ <add> _notifyUpdateWindowIDs( excludeID ) { <add> const windowIDs = []; <add> for( let key of this._windows.keys() ) { <add> windowIDs.push( key ); <add> } <add> <add> this._windows.forEach( ( w ) => { <add> if( w.id === excludeID ) { return; } <add> <add> w.webContents.send( IPCKeys.UpdateWindowIDs, windowIDs ); <add> } ); <add> } <add> <add> /** <ide> * Occurs when a show new window requested. <ide> * <ide> * @param {IPCEvent} ev Event data. <ide> const createdWindow = this.createNewWindow(); <ide> ev.sender.send( IPCKeys.FinishCreateNewWindow ); <ide> <del> const windowIDs = []; <del> for( let key of this._windows.keys() ) { <del> windowIDs.push( key ); <del> } <del> <del> this._windows.forEach( ( w ) => { <del> // Because it may not receive the message, explicit request ( RequestGetWindowIDs ) later <del> if( w.id === createdWindow.id ) { return; } <del> <del> w.webContents.send( IPCKeys.UpdateWindowIDs, windowIDs ); <del> } ); <add> this._notifyUpdateWindowIDs( createdWindow.id ); <ide> } <ide> <ide> /**
Java
mit
3b9e567df22828fb45dc3e72849e27616bab4e90
0
ncomet/devoxxfr2017-bytebuddy
package domain; import static net.bytebuddy.implementation.FixedValue.value; import static net.bytebuddy.implementation.MethodDelegation.to; import static net.bytebuddy.matcher.ElementMatchers.*; import static org.assertj.core.api.Assertions.assertThat; import interceptors.StrangeFeelingInterceptor; import net.bytebuddy.ByteBuddy; import net.bytebuddy.dynamic.loading.ClassLoadingStrategy; import org.mockito.Mockito; import org.testng.annotations.Test; import frameworks.mock.Cosmockpolitan; import interceptors.GetterSetterInterceptor; import java.util.Collections; /** * LECTRA * BuddyTests class * @author n.comet */ public class BuddyTests { @Test public void java() { final Cat cat = new Cat(); cat.setName("Garfield"); cat.feed("croquettes"); cat.feed("patée"); assertThat(cat.getStomach()).containsExactly("croquettes", "patée"); assertThat(cat.getName()).isEqualTo("Garfield"); } @Test public void buddyIntro() throws IllegalAccessException, InstantiationException { final Cat cat = new ByteBuddy() .subclass(Cat.class) /*.method(named("getStomach")) .intercept(to(StrangeFeelingInterceptor.class))*/ .make() .load(getClass().getClassLoader()) .getLoaded().newInstance(); cat.setName("Garfield"); cat.feed("croquettes"); cat.feed("patée"); assertThat(cat.getStomach()).containsExactly("croquettes", "patée"); assertThat(cat.getName()).isEqualTo("Garfield"); } @Test public void interceptors() throws Exception { final Cat cat = new ByteBuddy() .subclass(Cat.class) .method(isGetter().or(isSetter())) .intercept(to(GetterSetterInterceptor.class)) .make().load(getClass().getClassLoader()) .getLoaded().newInstance(); cat.setName("Felix"); assertThat(cat.getName()).isEqualTo("Felix"); } @Test public void classloadingStrategy() throws Exception { final Cat cat = new ByteBuddy() .subclass(Cat.class) .method(named("getStomach")) .intercept(value(Collections.singleton("NotToday"))) .make().load(getClass().getClassLoader(), ClassLoadingStrategy.Default.WRAPPER) .getLoaded().newInstance(); 
assertThat(cat.getStomach()).containsExactly("NotToday"); } @Test public void mockito() throws Exception { final Cat mock = Mockito.mock(Cat.class); mock.setName("Felix"); assertThat(mock.getName()).isNull(); } @Test public void cosmockpolitan() throws Exception { final Cat mock = Cosmockpolitan.mock(Cat.class); mock.setName("Felix"); assertThat(mock.getName()).isNull(); } }
project/src/test/java/domain/BuddyTests.java
package domain; import static net.bytebuddy.implementation.MethodDelegation.to; import static net.bytebuddy.matcher.ElementMatchers.*; import static org.assertj.core.api.Assertions.assertThat; import interceptors.StrangeFeelingInterceptor; import net.bytebuddy.ByteBuddy; import net.bytebuddy.dynamic.loading.ClassLoadingStrategy; import net.bytebuddy.implementation.FixedValue; import org.mockito.Mockito; import org.testng.annotations.Test; import frameworks.mock.Cosmockpolitan; import interceptors.GetterSetterInterceptor; import java.util.Collections; /** * LECTRA * BuddyTests class * @author n.comet */ public class BuddyTests { @Test public void java() { final Cat cat = new Cat(); cat.setName("Garfield"); cat.feed("croquettes"); cat.feed("patée"); assertThat(cat.getStomach()).containsExactly("croquettes", "patée"); assertThat(cat.getName()).isEqualTo("Garfield"); } @Test public void buddyIntro() throws IllegalAccessException, InstantiationException { final Cat cat = new ByteBuddy() .subclass(Cat.class) /*.method(named("getStomach")) .intercept(to(StrangeFeelingInterceptor.class))*/ .make() .load(getClass().getClassLoader()) .getLoaded().newInstance(); cat.setName("Garfield"); cat.feed("croquettes"); cat.feed("patée"); assertThat(cat.getStomach()).containsExactly("croquettes", "patée"); assertThat(cat.getName()).isEqualTo("Garfield"); } @Test public void interceptors() throws Exception { final Cat cat = new ByteBuddy() .subclass(Cat.class) .method(isGetter().or(isSetter())) .intercept(to(GetterSetterInterceptor.class)) .make().load(getClass().getClassLoader()) .getLoaded().newInstance(); cat.setName("Felix"); assertThat(cat.getName()).isEqualTo("Felix"); } @Test public void classloadingStrategy() throws Exception { final Cat cat = new ByteBuddy() .subclass(Cat.class) .method(named("getStomach")) .intercept(FixedValue.value(Collections.singleton("NotToday"))) .make().load(getClass().getClassLoader(), ClassLoadingStrategy.Default.WRAPPER) .getLoaded().newInstance(); 
assertThat(cat.getStomach()).containsExactly("NotToday"); } @Test public void mockito() throws Exception { final Cat mock = Mockito.mock(Cat.class); mock.setName("Felix"); assertThat(mock.getName()).isNull(); } @Test public void cosmockpolitan() throws Exception { final Cat mock = Cosmockpolitan.mock(Cat.class); mock.setName("Felix"); assertThat(mock.getName()).isNull(); } }
static import for FixedValue.value()
project/src/test/java/domain/BuddyTests.java
static import for FixedValue.value()
<ide><path>roject/src/test/java/domain/BuddyTests.java <ide> package domain; <ide> <add>import static net.bytebuddy.implementation.FixedValue.value; <ide> import static net.bytebuddy.implementation.MethodDelegation.to; <ide> import static net.bytebuddy.matcher.ElementMatchers.*; <ide> import static org.assertj.core.api.Assertions.assertThat; <ide> import net.bytebuddy.ByteBuddy; <ide> <ide> import net.bytebuddy.dynamic.loading.ClassLoadingStrategy; <del>import net.bytebuddy.implementation.FixedValue; <ide> import org.mockito.Mockito; <ide> import org.testng.annotations.Test; <ide> <ide> final Cat cat = new ByteBuddy() <ide> .subclass(Cat.class) <ide> .method(named("getStomach")) <del> .intercept(FixedValue.value(Collections.singleton("NotToday"))) <add> .intercept(value(Collections.singleton("NotToday"))) <ide> .make().load(getClass().getClassLoader(), ClassLoadingStrategy.Default.WRAPPER) <ide> .getLoaded().newInstance(); <ide>
JavaScript
mit
16a55ec5c9177d4cf493d831ac4925747f9fef7f
0
xml3d/shade.js
(function (ns) { var walk = require('estraverse'); var Syntax = walk.Syntax; var ANNO = require("../../base/annotation.js").ANNO; var interfaces = require("../../interfaces.js"); var TYPES = interfaces.TYPES, KINDS = interfaces.OBJECT_KINDS; function getConstructor(kind){ switch(kind){ case KINDS.FLOAT2: return "Shade.Vec2"; break; case KINDS.FLOAT3: return "Shade.Vec3"; break; case KINDS.FLOAT4: return "Shade.Vec4"; break; case KINDS.MATRIX3: return "Shade.Mat3"; break; case KINDS.MATRIX4: return "Shade.Mat4"; break; default: throw "Unsupported object kind in uniform expression argument: " + kind; } } function isMathCall(node) { return (node.callee.type === Syntax.MemberExpression && node.callee.object.type === Syntax.Identifier && node.callee.object.name === "Math"); } function isVecMathCall(node) { if(!isMathCall(node)) return false; var firstArgument = ANNO(node.arguments[0]); return firstArgument.isVector(); } var leaveVisitor = function (node, parent, variables, controller) { if (node.type == Syntax.MemberExpression) { var object = ANNO(node.object); if (node.object.type == Syntax.Identifier && object.isUniformExpression()) { if(variables.hasOwnProperty(node.object.name)) { //console.log("Found: " + node.object.name, variables[node.object.name]); node.object = variables[node.object.name].code; } } if (object.isGlobal() && node.property.type == Syntax.Identifier) { var property = ANNO(node.property); if(property.isObject()){ // Is the accessed parameter is a vector or matrix , we have to // wrap the typed array in the respective constructor var constructor = getConstructor(property.getKind()); return { type: Syntax.NewExpression, callee: { type: Syntax.Identifier, name: constructor}, arguments: [node] } } else if((parent == node) || parent.type != Syntax.MemberExpression){ // Is the accessed parameter is a scalar value, we have to // access the first entry of the input array return { type: Syntax.MemberExpression, computed: true, object: node, property: { 
type: Syntax.Literal, value: 0 } } } } } if (node.type == Syntax.CallExpression) { if (isVecMathCall(node)) { node.callee.object.name = "Math"; } } if (node.type == Syntax.Identifier) { if (~[Syntax.MemberExpression, Syntax.FunctionDeclaration, Syntax.VariableDeclarator].indexOf(parent.type)) return; if (parent.type == Syntax.NewExpression && parent.callee == node) return; // Not a variable on the right side if (parent.type == Syntax.AssignmentExpression && parent.left == node) return; if(variables.hasOwnProperty(node.name)) { //console.log("Found: " + node.name, this[node.name]); var code = variables[node.name].code; return code; } } if (node.type == Syntax.NewExpression) { if (node.callee.type == Syntax.Identifier) { var name = node.callee.name; switch(name) { case "Vec2": case "Vec3": case "Vec4": node.callee.name = "Shade." + name; break; } } } if (node.type == Syntax.ReturnStatement) { var anno = ANNO(node.argument); if(anno.isObject()){ node.argument = { type: Syntax.CallExpression, callee: { type: Syntax.MemberExpression, object: node.argument, property: {type: Syntax.Identifier, name: "_toFloatArray" } }, arguments: [] }; return node; } } } ns.transformUniformSetter = function (ast, variables) { return walk.replace(ast, { leave: function(node, parent) { return leaveVisitor(node, parent, variables, this); }}); }; }(exports));
src/analyze/uniformExpressions/uniformSetterTransformation.js
(function (ns) { var walk = require('estraverse'); var Syntax = walk.Syntax; var ANNO = require("../../base/annotation.js").ANNO; var interfaces = require("../../interfaces.js"); var TYPES = interfaces.TYPES, KINDS = interfaces.OBJECT_KINDS; function getConstructor(kind){ switch(kind){ case KINDS.FLOAT2: return "Shade.Vec2"; break; case KINDS.FLOAT3: return "Shade.Vec3"; break; case KINDS.FLOAT4: return "Shade.Vec4"; break; case KINDS.MATRIX3: return "Shade.Mat3"; break; case KINDS.MATRIX4: return "Shade.Mat4"; break; default: throw "Unsupported object kind in uniform expression argument: " + kind; } } function isMathCall(node) { return (node.callee.type === Syntax.MemberExpression && node.callee.object.type === Syntax.Identifier && node.callee.object.name === "Math"); } function isVecMathCall(node) { if(!isMathCall(node)) return false; var firstArgument = ANNO(node.arguments[0]); return firstArgument.isVector(); } var leaveVisitor = function (node, parent, variables, controller) { if (node.type == Syntax.MemberExpression) { var object = ANNO(node.object); if (node.object.type == Syntax.Identifier && object.isUniformExpression()) { if(variables.hasOwnProperty(node.object.name)) { //console.log("Found: " + node.object.name, variables[node.object.name]); node.object = variables[node.object.name].code; } } if (object.isGlobal() && node.property.type == Syntax.Identifier) { var property = ANNO(node.property); if(property.isObject()){ // Is the accessed parameter is a vector or matrix , we have to // wrap the typed array in the respective constructor var constructor = getConstructor(property.getKind()); return { type: Syntax.NewExpression, callee: { type: Syntax.Identifier, name: constructor}, arguments: [node] } } else if((parent == node) || parent.type != Syntax.MemberExpression){ // Is the accessed parameter is a scalar value, we have to // access the first entry of the input array return { type: Syntax.MemberExpression, computed: true, object: node, property: { 
type: Syntax.Literal, value: 0 } } } } } if (node.type == Syntax.CallExpression) { if (isVecMathCall(node)) { node.callee.object.name = "this.VecMath"; } } if (node.type == Syntax.Identifier) { if (~[Syntax.MemberExpression, Syntax.FunctionDeclaration, Syntax.VariableDeclarator].indexOf(parent.type)) return; if (parent.type == Syntax.NewExpression && parent.callee == node) return; // Not a variable on the right side if (parent.type == Syntax.AssignmentExpression && parent.left == node) return; if(variables.hasOwnProperty(node.name)) { //console.log("Found: " + node.name, this[node.name]); var code = variables[node.name].code; return code; } } if (node.type == Syntax.NewExpression) { if (node.callee.type == Syntax.Identifier) { var name = node.callee.name; switch(name) { case "Vec2": case "Vec3": case "Vec4": node.callee.name = "Shade." + name; break; } } } if (node.type == Syntax.ReturnStatement) { var anno = ANNO(node.argument); if(anno.isObject()){ node.argument = { type: Syntax.CallExpression, callee: { type: Syntax.MemberExpression, object: node.argument, property: {type: Syntax.Identifier, name: "_toFloatArray" } }, arguments: [] }; return node; } } } ns.transformUniformSetter = function (ast, variables) { return walk.replace(ast, { leave: function(node, parent) { return leaveVisitor(node, parent, variables, this); }}); }; }(exports));
Fix Math issue
src/analyze/uniformExpressions/uniformSetterTransformation.js
Fix Math issue
<ide><path>rc/analyze/uniformExpressions/uniformSetterTransformation.js <ide> <ide> if (node.type == Syntax.CallExpression) { <ide> if (isVecMathCall(node)) { <del> node.callee.object.name = "this.VecMath"; <add> node.callee.object.name = "Math"; <ide> } <ide> } <ide>
Java
apache-2.0
45e09e7ea9fdd5e3309a6c0666920d1d7ecb95e1
0
jivesoftware/miru,bruceadowns/miru,jivesoftware/miru,bruceadowns/miru,bruceadowns/miru,jivesoftware/miru,bruceadowns/miru,jivesoftware/miru
package com.jivesoftware.os.miru.stream.plugins.strut; import com.google.common.base.Optional; import com.google.common.collect.Lists; import com.google.common.collect.MinMaxPriorityQueue; import com.jivesoftware.os.filer.io.api.StackBuffer; import com.jivesoftware.os.miru.api.MiruPartitionCoord; import com.jivesoftware.os.miru.api.activity.schema.MiruFieldDefinition; import com.jivesoftware.os.miru.api.activity.schema.MiruSchema; import com.jivesoftware.os.miru.api.base.MiruTermId; import com.jivesoftware.os.miru.api.query.filter.MiruValue; import com.jivesoftware.os.miru.plugin.bitmap.MiruBitmaps; import com.jivesoftware.os.miru.plugin.context.MiruRequestContext; import com.jivesoftware.os.miru.plugin.index.MiruTermComposer; import com.jivesoftware.os.miru.plugin.solution.MiruAggregateUtil; import com.jivesoftware.os.miru.plugin.solution.MiruAggregateUtil.ConsumeBitmaps; import com.jivesoftware.os.miru.plugin.solution.MiruRequest; import com.jivesoftware.os.miru.plugin.solution.MiruSolutionLog; import com.jivesoftware.os.miru.plugin.solution.MiruSolutionLogLevel; import com.jivesoftware.os.miru.stream.plugins.strut.HotOrNot.Hotness; import com.jivesoftware.os.miru.stream.plugins.strut.StrutModelCache.ModelScore; import com.jivesoftware.os.miru.stream.plugins.strut.StrutModelCache.StrutModel; import com.jivesoftware.os.miru.stream.plugins.strut.StrutQuery.Strategy; import com.jivesoftware.os.mlogger.core.MetricLogger; import com.jivesoftware.os.mlogger.core.MetricLoggerFactory; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * */ public class Strut { private static final MetricLogger LOG = MetricLoggerFactory.getLogger(); private final MiruAggregateUtil aggregateUtil = new MiruAggregateUtil(); private final StrutModelCache cache; public Strut(StrutModelCache cache) { this.cache = cache; } public <BM extends IBM, IBM> StrutAnswer yourStuff(String name, MiruPartitionCoord coord, MiruBitmaps<BM, IBM> bitmaps, MiruRequestContext<BM, 
IBM, ?> requestContext, MiruRequest<StrutQuery> request, Optional<StrutReport> report, ConsumeBitmaps<BM> consumeAnswers, MiruSolutionLog solutionLog) throws Exception { StrutModel model = cache.get(request.tenantId, request.query.catwalkId, request.query.modelId, coord.partitionId.getId(), request.query.catwalkQuery); StackBuffer stackBuffer = new StackBuffer(); MiruSchema schema = requestContext.getSchema(); int pivotFieldId = schema.getFieldId(request.query.constraintField); MiruFieldDefinition pivotFieldDefinition = schema.getFieldDefinition(pivotFieldId); MiruTermComposer termComposer = requestContext.getTermComposer(); String[][] modelFeatureFields = request.query.catwalkQuery.featureFields; String[][] desiredFeatureFields = request.query.featureFields; String[][] featureFields = new String[modelFeatureFields.length][]; for (int i = 0; i < modelFeatureFields.length; i++) { for (int j = 0; j < desiredFeatureFields.length; j++) { if (Arrays.equals(modelFeatureFields[i], desiredFeatureFields[j])) { featureFields[i] = modelFeatureFields[i]; break; } } } int[][] featureFieldIds = new int[featureFields.length][]; for (int i = 0; i < featureFields.length; i++) { String[] featureField = featureFields[i]; if (featureField != null) { featureFieldIds[i] = new int[featureField.length]; for (int j = 0; j < featureField.length; j++) { featureFieldIds[i][j] = requestContext.getSchema().getFieldId(featureField[j]); } } } List<HotOrNot> hotOrNots = new ArrayList<>(request.query.desiredNumberOfResults); float[] thresholds = report.isPresent() ? new float[] { report.get().threshold } : new float[] { 0.5f, 0.2f, 0.08f, 0f }; @SuppressWarnings("unchecked") List<Hotness>[][] features = request.query.includeFeatures ? 
new List[thresholds.length][] : null; @SuppressWarnings("unchecked") MinMaxPriorityQueue<Scored>[] scored = new MinMaxPriorityQueue[thresholds.length]; for (int i = 0; i < thresholds.length; i++) { if (features != null) { features[i] = new List[featureFields.length]; } scored[i] = MinMaxPriorityQueue .expectedSize(request.query.desiredNumberOfResults) .maximumSize(request.query.desiredNumberOfResults) .create(); } long start = System.currentTimeMillis(); int[] featureCount = { 0 }; double[][] score = new double[thresholds.length][2]; int[] termCount = new int[thresholds.length]; MiruTermId[] currentPivot = { null }; aggregateUtil.gatherFeatures(name, bitmaps, requestContext, consumeAnswers, featureFieldIds, true, (answerTermId, featureId, termIds) -> { featureCount[0]++; if (currentPivot[0] == null || !currentPivot[0].equals(answerTermId)) { if (currentPivot[0] != null) { for (int i = 0; i < thresholds.length; i++) { if (termCount[i] > 0) { List<Hotness>[] scoredFeatures = null; if (request.query.includeFeatures) { scoredFeatures = new List[features[i].length]; System.arraycopy(features[i], 0, scoredFeatures, 0, features[i].length); } scored[i].add(new Scored(currentPivot[0], finalizeScore(score[i], termCount[i], model.getModelCount(), model.getTotalCount(), request.query.strategy), termCount[i], scoredFeatures)); } Arrays.fill(score[i], 0f); termCount[i] = 0; if (request.query.includeFeatures) { Arrays.fill(features[i], null); } } } currentPivot[0] = answerTermId; } ModelScore modelScore = model.score(featureId, termIds); if (modelScore != null) { // if (!Float.isNaN(s) && s > 0.0f) { float s = (float) modelScore.numerator / (float) modelScore.denominator; if (s > 1.0f) { LOG.warn("Encountered score {} > 1.0 for answerTermId:{} featureId:{} termIds:{}", s, answerTermId, featureId, Arrays.toString(termIds)); } //TODO tiered scoring based on thresholds for (int i = 0; i < thresholds.length; i++) { if (s > thresholds[i]) { score(score[i], modelScore, s, 
model.getModelCount(), model.getTotalCount(), request.query.strategy); termCount[i]++; if (request.query.includeFeatures) { if (features[i][featureId] == null) { features[i][featureId] = Lists.newArrayList(); } MiruValue[] values = new MiruValue[termIds.length]; for (int j = 0; j < termIds.length; j++) { values[j] = new MiruValue(termComposer.decompose(schema, schema.getFieldDefinition(featureFieldIds[featureId][j]), stackBuffer, termIds[j])); } features[i][featureId].add(new Hotness(values, s)); } } } } return true; }, solutionLog, stackBuffer); for (int i = 0; i < thresholds.length; i++) { if (termCount[i] > 0) { scored[i].add(new Scored(currentPivot[0], finalizeScore(score[i], termCount[i], model.getModelCount(), model.getTotalCount(), request.query.strategy), termCount[i], request.query.includeFeatures ? features[i] : null)); } } solutionLog.log(MiruSolutionLogLevel.INFO, "Strut scored {} features in {} ms", featureCount[0], System.currentTimeMillis() - start); float scoredThreshold = 0f; for (int i = 0; i < scored.length; i++) { if (i == scored.length - 1 || scored[i].size() == request.query.desiredNumberOfResults) { for (Scored s : scored[i]) { hotOrNots.add(new HotOrNot(new MiruValue(termComposer.decompose(schema, pivotFieldDefinition, stackBuffer, s.term)), s.score, s.termCount, s.features)); } solutionLog.log(MiruSolutionLogLevel.INFO, "Strut found {} terms at threshold {}", hotOrNots.size(), thresholds[i]); scoredThreshold = thresholds[i]; break; } } boolean resultsExhausted = request.query.timeRange.smallestTimestamp > requestContext.getTimeIndex().getLargestTimestamp(); return new StrutAnswer(hotOrNots, scoredThreshold, resultsExhausted); } private void score(double[] scores, ModelScore nextScore, float s, long modelCount, long totalCount, Strategy strategy) { if (strategy == Strategy.MAX) { scores[0] = Math.max(scores[0], s); } else if (strategy == Strategy.MEAN) { scores[0] += s; } else if (strategy == Strategy.NAIVE_BAYES) { long hits = 
nextScore.numerator; long misses = nextScore.denominator - nextScore.numerator; long nonModelCount = totalCount - modelCount; if (scores[0] == 0f) { scores[0] = 1f; } scores[0] *= ((float) (1 + hits) / (1 + modelCount)); if (scores[1] == 0f) { scores[1] = 1f; } scores[1] *= ((float) (1 + misses) / (1 + nonModelCount)); } else { throw new UnsupportedOperationException("Strategy not supported: " + strategy); } } private float finalizeScore(double[] score, int termCount, long modelCount, long totalCount, Strategy strategy) { if (strategy == Strategy.MAX) { return (float) score[0]; } else if (strategy == Strategy.MEAN) { return (float) score[0] / termCount; } else if (strategy == Strategy.NAIVE_BAYES) { long nonModelCount = totalCount - modelCount; score[0] *= Math.log((double) (1 + modelCount) / (1 + totalCount)); score[1] *= Math.log((double) (1 + nonModelCount) / (1 + totalCount)); float result = (float) (score[0] / score[1]); return Float.isFinite(result) ? result : Float.isInfinite(result) ? 
Float.MAX_VALUE : 0f; } else { throw new UnsupportedOperationException("Strategy not supported: " + strategy); } } /*public static void main(String[] args) { float totalActivities = 3_000_000f; float viewedActivities = 10_000f; float[] viewedFeatures = { 7f, 3f, 8f }; float[] nonViewedFeatures = { 3f, 12f, 12f }; // 7/10, 3/15, 8/20 float pViewed1 = (7f / 10_000f) * (3f / 10_000f) * (8f / 10_000f) * (10_000f / 3_000_000f); float pNonViewed1 = (3f / 2_990_000f) * (12f / 2_990_000f) * (12f / 2_990_000f) * (2_990_000f / 3_000_000f); float p1 = (10f / 3_000_000f) * (15f / 3_000_000f) * (20f / 3_000_000f); float pViewed2 = (5f / 15_000f) * (6f / 15_000f) * (10f / 15_000f) * (15_000f / 3_000_000f); float pNonViewed2 = (8f / 2_985_000f) * (2f / 2_985_000f) * (2f / 2_985_000f) * (2_985_000f / 3_000_000f); //System.out.println(pViewed1); //System.out.println(pNonViewed1); System.out.println("pV1: " + pViewed1); System.out.println("pNV1: " + pNonViewed1); System.out.println("p1: " + p1); System.out.println("pV1/p1: " + (pViewed1 / p1)); System.out.println("pNV1/p1: " + (pNonViewed1 / p1)); System.out.println("---"); System.out.println(pViewed2 / pNonViewed2); System.out.println((pViewed1 * pViewed2) / (pNonViewed1 * pNonViewed2)); }*/ static class Scored implements Comparable<Scored> { MiruTermId term; float score; int termCount; List<Hotness>[] features; public Scored(MiruTermId term, float score, int termCount, List<Hotness>[] features) { this.term = term; this.score = score; this.termCount = termCount; this.features = features; } @Override public int compareTo(Scored o) { int c = Float.compare(o.score, score); // reversed if (c != 0) { return c; } return term.compareTo(o.term); } } }
miru-stream-plugins/src/main/java/com/jivesoftware/os/miru/stream/plugins/strut/Strut.java
package com.jivesoftware.os.miru.stream.plugins.strut; import com.google.common.base.Optional; import com.google.common.collect.Lists; import com.google.common.collect.MinMaxPriorityQueue; import com.jivesoftware.os.filer.io.api.StackBuffer; import com.jivesoftware.os.miru.api.MiruPartitionCoord; import com.jivesoftware.os.miru.api.activity.schema.MiruFieldDefinition; import com.jivesoftware.os.miru.api.activity.schema.MiruSchema; import com.jivesoftware.os.miru.api.base.MiruTermId; import com.jivesoftware.os.miru.api.query.filter.MiruValue; import com.jivesoftware.os.miru.plugin.bitmap.MiruBitmaps; import com.jivesoftware.os.miru.plugin.context.MiruRequestContext; import com.jivesoftware.os.miru.plugin.index.MiruTermComposer; import com.jivesoftware.os.miru.plugin.solution.MiruAggregateUtil; import com.jivesoftware.os.miru.plugin.solution.MiruAggregateUtil.ConsumeBitmaps; import com.jivesoftware.os.miru.plugin.solution.MiruRequest; import com.jivesoftware.os.miru.plugin.solution.MiruSolutionLog; import com.jivesoftware.os.miru.plugin.solution.MiruSolutionLogLevel; import com.jivesoftware.os.miru.stream.plugins.strut.HotOrNot.Hotness; import com.jivesoftware.os.miru.stream.plugins.strut.StrutModelCache.ModelScore; import com.jivesoftware.os.miru.stream.plugins.strut.StrutModelCache.StrutModel; import com.jivesoftware.os.miru.stream.plugins.strut.StrutQuery.Strategy; import com.jivesoftware.os.mlogger.core.MetricLogger; import com.jivesoftware.os.mlogger.core.MetricLoggerFactory; import java.util.ArrayList; import java.util.Arrays; import java.util.List; /** * */ public class Strut { private static final MetricLogger LOG = MetricLoggerFactory.getLogger(); private final MiruAggregateUtil aggregateUtil = new MiruAggregateUtil(); private final StrutModelCache cache; public Strut(StrutModelCache cache) { this.cache = cache; } public <BM extends IBM, IBM> StrutAnswer yourStuff(String name, MiruPartitionCoord coord, MiruBitmaps<BM, IBM> bitmaps, MiruRequestContext<BM, 
IBM, ?> requestContext, MiruRequest<StrutQuery> request, Optional<StrutReport> report, ConsumeBitmaps<BM> consumeAnswers, MiruSolutionLog solutionLog) throws Exception { StrutModel model = cache.get(request.tenantId, request.query.catwalkId, request.query.modelId, coord.partitionId.getId(), request.query.catwalkQuery); StackBuffer stackBuffer = new StackBuffer(); MiruSchema schema = requestContext.getSchema(); int pivotFieldId = schema.getFieldId(request.query.constraintField); MiruFieldDefinition pivotFieldDefinition = schema.getFieldDefinition(pivotFieldId); MiruTermComposer termComposer = requestContext.getTermComposer(); String[][] modelFeatureFields = request.query.catwalkQuery.featureFields; String[][] desiredFeatureFields = request.query.featureFields; String[][] featureFields = new String[modelFeatureFields.length][]; for (int i = 0; i < modelFeatureFields.length; i++) { for (int j = 0; j < desiredFeatureFields.length; j++) { if (Arrays.equals(modelFeatureFields[i], desiredFeatureFields[j])) { featureFields[i] = modelFeatureFields[i]; break; } } } int[][] featureFieldIds = new int[featureFields.length][]; for (int i = 0; i < featureFields.length; i++) { String[] featureField = featureFields[i]; if (featureField != null) { featureFieldIds[i] = new int[featureField.length]; for (int j = 0; j < featureField.length; j++) { featureFieldIds[i][j] = requestContext.getSchema().getFieldId(featureField[j]); } } } List<HotOrNot> hotOrNots = new ArrayList<>(request.query.desiredNumberOfResults); float[] thresholds = report.isPresent() ? new float[] { report.get().threshold } : new float[] { 0.5f, 0.2f, 0.08f, 0f }; @SuppressWarnings("unchecked") List<Hotness>[][] features = request.query.includeFeatures ? 
new List[thresholds.length][] : null; @SuppressWarnings("unchecked") MinMaxPriorityQueue<Scored>[] scored = new MinMaxPriorityQueue[thresholds.length]; for (int i = 0; i < thresholds.length; i++) { if (features != null) { features[i] = new List[featureFields.length]; } scored[i] = MinMaxPriorityQueue .expectedSize(request.query.desiredNumberOfResults) .maximumSize(request.query.desiredNumberOfResults) .create(); } long start = System.currentTimeMillis(); int[] featureCount = { 0 }; float[][] score = new float[thresholds.length][2]; int[] termCount = new int[thresholds.length]; MiruTermId[] currentPivot = { null }; aggregateUtil.gatherFeatures(name, bitmaps, requestContext, consumeAnswers, featureFieldIds, true, (answerTermId, featureId, termIds) -> { featureCount[0]++; if (currentPivot[0] == null || !currentPivot[0].equals(answerTermId)) { if (currentPivot[0] != null) { for (int i = 0; i < thresholds.length; i++) { if (termCount[i] > 0) { List<Hotness>[] scoredFeatures = null; if (request.query.includeFeatures) { scoredFeatures = new List[features[i].length]; System.arraycopy(features[i], 0, scoredFeatures, 0, features[i].length); } scored[i].add(new Scored(currentPivot[0], finalizeScore(score[i], termCount[i], model.getModelCount(), model.getTotalCount(), request.query.strategy), termCount[i], scoredFeatures)); } Arrays.fill(score[i], 0f); termCount[i] = 0; if (request.query.includeFeatures) { Arrays.fill(features[i], null); } } } currentPivot[0] = answerTermId; } ModelScore modelScore = model.score(featureId, termIds); if (modelScore != null) { // if (!Float.isNaN(s) && s > 0.0f) { float s = (float) modelScore.numerator / (float) modelScore.denominator; if (s > 1.0f) { LOG.warn("Encountered score {} > 1.0 for answerTermId:{} featureId:{} termIds:{}", s, answerTermId, featureId, Arrays.toString(termIds)); } //TODO tiered scoring based on thresholds for (int i = 0; i < thresholds.length; i++) { if (s > thresholds[i]) { score(score[i], modelScore, s, 
model.getModelCount(), model.getTotalCount(), request.query.strategy); termCount[i]++; if (request.query.includeFeatures) { if (features[i][featureId] == null) { features[i][featureId] = Lists.newArrayList(); } MiruValue[] values = new MiruValue[termIds.length]; for (int j = 0; j < termIds.length; j++) { values[j] = new MiruValue(termComposer.decompose(schema, schema.getFieldDefinition(featureFieldIds[featureId][j]), stackBuffer, termIds[j])); } features[i][featureId].add(new Hotness(values, s)); } } } } return true; }, solutionLog, stackBuffer); for (int i = 0; i < thresholds.length; i++) { if (termCount[i] > 0) { scored[i].add(new Scored(currentPivot[0], finalizeScore(score[i], termCount[i], model.getModelCount(), model.getTotalCount(), request.query.strategy), termCount[i], request.query.includeFeatures ? features[i] : null)); } } solutionLog.log(MiruSolutionLogLevel.INFO, "Strut scored {} features in {} ms", featureCount[0], System.currentTimeMillis() - start); float scoredThreshold = 0f; for (int i = 0; i < scored.length; i++) { if (i == scored.length - 1 || scored[i].size() == request.query.desiredNumberOfResults) { for (Scored s : scored[i]) { hotOrNots.add(new HotOrNot(new MiruValue(termComposer.decompose(schema, pivotFieldDefinition, stackBuffer, s.term)), s.score, s.termCount, s.features)); } solutionLog.log(MiruSolutionLogLevel.INFO, "Strut found {} terms at threshold {}", hotOrNots.size(), thresholds[i]); scoredThreshold = thresholds[i]; break; } } boolean resultsExhausted = request.query.timeRange.smallestTimestamp > requestContext.getTimeIndex().getLargestTimestamp(); return new StrutAnswer(hotOrNots, scoredThreshold, resultsExhausted); } private void score(float[] scores, ModelScore nextScore, float s, long modelCount, long totalCount, Strategy strategy) { if (strategy == Strategy.MAX) { scores[0] = Math.max(scores[0], s); } else if (strategy == Strategy.MEAN) { scores[0] += s; } else if (strategy == Strategy.NAIVE_BAYES) { long hits = 
nextScore.numerator; long misses = nextScore.denominator - nextScore.numerator; long nonModelCount = totalCount - modelCount; if (scores[0] == 0f) { scores[0] = 1f; } scores[0] *= ((float) (1 + hits) / (1 + modelCount)); if (scores[1] == 0f) { scores[1] = 1f; } scores[1] *= ((float) (1 + misses) / (1 + nonModelCount)); } else { throw new UnsupportedOperationException("Strategy not supported: " + strategy); } } private float finalizeScore(float[] score, int termCount, long modelCount, long totalCount, Strategy strategy) { if (strategy == Strategy.MAX) { return score[0]; } else if (strategy == Strategy.MEAN) { return score[0] / termCount; } else if (strategy == Strategy.NAIVE_BAYES) { long nonModelCount = totalCount - modelCount; score[0] *= ((float) (1 + modelCount) / (1 + totalCount)); score[1] *= ((float) (1 + nonModelCount) / (1 + totalCount)); return score[0] / score[1]; } else { throw new UnsupportedOperationException("Strategy not supported: " + strategy); } } /*public static void main(String[] args) { float totalActivities = 3_000_000f; float viewedActivities = 10_000f; float[] viewedFeatures = { 7f, 3f, 8f }; float[] nonViewedFeatures = { 3f, 12f, 12f }; // 7/10, 3/15, 8/20 float pViewed1 = (7f / 10_000f) * (3f / 10_000f) * (8f / 10_000f) * (10_000f / 3_000_000f); float pNonViewed1 = (3f / 2_990_000f) * (12f / 2_990_000f) * (12f / 2_990_000f) * (2_990_000f / 3_000_000f); float p1 = (10f / 3_000_000f) * (15f / 3_000_000f) * (20f / 3_000_000f); float pViewed2 = (5f / 15_000f) * (6f / 15_000f) * (10f / 15_000f) * (15_000f / 3_000_000f); float pNonViewed2 = (8f / 2_985_000f) * (2f / 2_985_000f) * (2f / 2_985_000f) * (2_985_000f / 3_000_000f); //System.out.println(pViewed1); //System.out.println(pNonViewed1); System.out.println("pV1: " + pViewed1); System.out.println("pNV1: " + pNonViewed1); System.out.println("p1: " + p1); System.out.println("pV1/p1: " + (pViewed1 / p1)); System.out.println("pNV1/p1: " + (pNonViewed1 / p1)); System.out.println("---"); 
System.out.println(pViewed2 / pNonViewed2); System.out.println((pViewed1 * pViewed2) / (pNonViewed1 * pNonViewed2)); }*/ static class Scored implements Comparable<Scored> { MiruTermId term; float score; int termCount; List<Hotness>[] features; public Scored(MiruTermId term, float score, int termCount, List<Hotness>[] features) { this.term = term; this.score = score; this.termCount = termCount; this.features = features; } @Override public int compareTo(Scored o) { int c = Float.compare(o.score, score); // reversed if (c != 0) { return c; } return term.compareTo(o.term); } } }
use logarithms to prevent numeric overflow in strut naive bayes strategy
miru-stream-plugins/src/main/java/com/jivesoftware/os/miru/stream/plugins/strut/Strut.java
use logarithms to prevent numeric overflow in strut naive bayes strategy
<ide><path>iru-stream-plugins/src/main/java/com/jivesoftware/os/miru/stream/plugins/strut/Strut.java <ide> <ide> long start = System.currentTimeMillis(); <ide> int[] featureCount = { 0 }; <del> float[][] score = new float[thresholds.length][2]; <add> double[][] score = new double[thresholds.length][2]; <ide> int[] termCount = new int[thresholds.length]; <ide> MiruTermId[] currentPivot = { null }; <ide> aggregateUtil.gatherFeatures(name, <ide> return new StrutAnswer(hotOrNots, scoredThreshold, resultsExhausted); <ide> } <ide> <del> private void score(float[] scores, ModelScore nextScore, float s, long modelCount, long totalCount, Strategy strategy) { <add> private void score(double[] scores, ModelScore nextScore, float s, long modelCount, long totalCount, Strategy strategy) { <ide> if (strategy == Strategy.MAX) { <ide> scores[0] = Math.max(scores[0], s); <ide> } else if (strategy == Strategy.MEAN) { <ide> } <ide> } <ide> <del> private float finalizeScore(float[] score, int termCount, long modelCount, long totalCount, Strategy strategy) { <add> private float finalizeScore(double[] score, int termCount, long modelCount, long totalCount, Strategy strategy) { <ide> if (strategy == Strategy.MAX) { <del> return score[0]; <add> return (float) score[0]; <ide> } else if (strategy == Strategy.MEAN) { <del> return score[0] / termCount; <add> return (float) score[0] / termCount; <ide> } else if (strategy == Strategy.NAIVE_BAYES) { <ide> long nonModelCount = totalCount - modelCount; <del> score[0] *= ((float) (1 + modelCount) / (1 + totalCount)); <del> score[1] *= ((float) (1 + nonModelCount) / (1 + totalCount)); <del> return score[0] / score[1]; <add> score[0] *= Math.log((double) (1 + modelCount) / (1 + totalCount)); <add> score[1] *= Math.log((double) (1 + nonModelCount) / (1 + totalCount)); <add> float result = (float) (score[0] / score[1]); <add> return Float.isFinite(result) ? result : Float.isInfinite(result) ? 
Float.MAX_VALUE : 0f; <ide> } else { <ide> throw new UnsupportedOperationException("Strategy not supported: " + strategy); <ide> }
Java
mit
74d69bd1127ebd5482cd157b7234b3f2199a7fdd
0
csdms/wmt-client,csdms/wmt-client,csdms/wmt-client
package edu.colorado.csdms.wmt.client.ui.handler; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.user.client.Window; import edu.colorado.csdms.wmt.client.control.DataManager; import edu.colorado.csdms.wmt.client.control.DataTransfer; /** * Handles click on the "Reload" button in the ModelActionPanel. It calls * DataTransfer.getComponent to fetch all components from the server. * * @author Mark Piper ([email protected]) */ public class ModelActionPanelReloadHandler implements ClickHandler { private DataManager data; /** * Creates a new instance of {@link ModelActionPanelReloadHandler}. * * @param data the DataManager object for the WMT session */ public ModelActionPanelReloadHandler(DataManager data) { this.data = data; } @Override public void onClick(ClickEvent event) { // Hide the MoreActionsMenu. data.getPerspective().getActionButtonPanel().getMoreMenu().hide(); // Reload each component from the original list. for (String componentId : data.componentIdList) { DataTransfer.reloadComponent(data, componentId); } // Not really, since the above is asynchronous. Window.alert("Components reloaded."); } }
src/edu/colorado/csdms/wmt/client/ui/handler/ModelActionPanelReloadHandler.java
package edu.colorado.csdms.wmt.client.ui.handler; import java.util.Iterator; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.user.client.Window; import edu.colorado.csdms.wmt.client.control.DataManager; import edu.colorado.csdms.wmt.client.control.DataTransfer; /** * Handles click on the "Reload" button in the ModelActionPanel. It calls * DataTransfer.getComponent to fetch all components from the server. * * @author Mark Piper ([email protected]) */ public class ModelActionPanelReloadHandler implements ClickHandler { private DataManager data; /** * Creates a new instance of {@link ModelActionPanelReloadHandler}. * * @param data the DataManager object for the WMT session */ public ModelActionPanelReloadHandler(DataManager data) { this.data = data; } @Override public void onClick(ClickEvent event) { // Hide the MoreActionsMenu. data.getPerspective().getActionButtonPanel().getMoreMenu().hide(); // Reload each component from the original list. for (String componentId : data.componentIdList) { DataTransfer.reloadComponent(data, componentId); } // Not really, since the above is asynchronous. Window.alert("Components reloaded."); } }
Remove unused import
src/edu/colorado/csdms/wmt/client/ui/handler/ModelActionPanelReloadHandler.java
Remove unused import
<ide><path>rc/edu/colorado/csdms/wmt/client/ui/handler/ModelActionPanelReloadHandler.java <ide> package edu.colorado.csdms.wmt.client.ui.handler; <del> <del>import java.util.Iterator; <ide> <ide> import com.google.gwt.event.dom.client.ClickEvent; <ide> import com.google.gwt.event.dom.client.ClickHandler;
Java
apache-2.0
ad4fd524d6a49e570ec61167fb945a398ed6addf
0
smith750/rice,gathreya/rice-kc,smith750/rice,kuali/kc-rice,bhutchinson/rice,sonamuthu/rice-1,gathreya/rice-kc,shahess/rice,gathreya/rice-kc,sonamuthu/rice-1,bsmith83/rice-1,cniesen/rice,cniesen/rice,sonamuthu/rice-1,ewestfal/rice,smith750/rice,geothomasp/kualico-rice-kc,gathreya/rice-kc,ewestfal/rice,cniesen/rice,geothomasp/kualico-rice-kc,ewestfal/rice,gathreya/rice-kc,rojlarge/rice-kc,ewestfal/rice,ewestfal/rice-svn2git-test,jwillia/kc-rice1,kuali/kc-rice,UniversityOfHawaiiORS/rice,shahess/rice,ewestfal/rice,jwillia/kc-rice1,cniesen/rice,ewestfal/rice-svn2git-test,bsmith83/rice-1,cniesen/rice,UniversityOfHawaiiORS/rice,bhutchinson/rice,geothomasp/kualico-rice-kc,kuali/kc-rice,sonamuthu/rice-1,ewestfal/rice-svn2git-test,shahess/rice,bhutchinson/rice,ewestfal/rice-svn2git-test,rojlarge/rice-kc,shahess/rice,geothomasp/kualico-rice-kc,rojlarge/rice-kc,UniversityOfHawaiiORS/rice,jwillia/kc-rice1,shahess/rice,kuali/kc-rice,rojlarge/rice-kc,kuali/kc-rice,bhutchinson/rice,bhutchinson/rice,smith750/rice,UniversityOfHawaiiORS/rice,smith750/rice,jwillia/kc-rice1,geothomasp/kualico-rice-kc,rojlarge/rice-kc,jwillia/kc-rice1,bsmith83/rice-1,UniversityOfHawaiiORS/rice,bsmith83/rice-1
/* * Copyright 2005-2006 The Kuali Foundation. * * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kew.actionrequest; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import org.apache.log4j.Logger; import org.kuali.rice.core.exception.RiceRuntimeException; import org.kuali.rice.kew.actionrequest.service.ActionRequestService; import org.kuali.rice.kew.engine.node.RouteNodeInstance; import org.kuali.rice.kew.exception.WorkflowRuntimeException; import org.kuali.rice.kew.identity.Id; import org.kuali.rice.kew.identity.service.IdentityHelperService; import org.kuali.rice.kew.role.KimRoleRecipient; import org.kuali.rice.kew.routeheader.DocumentRouteHeaderValue; import org.kuali.rice.kew.rule.ResolvedQualifiedRole; import org.kuali.rice.kew.service.KEWServiceLocator; import org.kuali.rice.kew.user.RoleRecipient; import org.kuali.rice.kew.user.UserId; import org.kuali.rice.kew.util.CodeTranslator; import org.kuali.rice.kew.util.KEWConstants; import org.kuali.rice.kew.util.Utilities; import org.kuali.rice.kew.workgroup.GroupId; import org.kuali.rice.kim.bo.entity.KimPrincipal; import org.kuali.rice.kim.bo.group.KimGroup; import org.kuali.rice.kim.bo.role.dto.DelegateInfo; import org.kuali.rice.kim.bo.role.dto.KimRoleInfo; import org.kuali.rice.kim.bo.role.dto.ResponsibilityActionInfo; import org.kuali.rice.kim.bo.types.dto.AttributeSet; import 
org.kuali.rice.kim.service.IdentityManagementService; import org.kuali.rice.kim.service.KIMServiceLocator; import org.kuali.rice.kim.service.RoleManagementService; import org.kuali.rice.kns.util.KNSConstants; /** * A factory to aid in creating the ever-so-gnarly ActionRequestValue object. * * @author Kuali Rice Team ([email protected]) */ public class ActionRequestFactory { private static final Logger LOG = Logger.getLogger(ActionRequestFactory.class); private static RoleManagementService roleManagementService; private static IdentityHelperService identityHelperService; private static IdentityManagementService identityManagementService; private static ActionRequestService actionRequestService; private DocumentRouteHeaderValue document; private RouteNodeInstance routeNode; private List<ActionRequestValue> requestGraphs = new ArrayList<ActionRequestValue>(); public ActionRequestFactory() { } public ActionRequestFactory(DocumentRouteHeaderValue document) { this.document = document; } public ActionRequestFactory(DocumentRouteHeaderValue document, RouteNodeInstance routeNode) { this.document = document; this.routeNode = routeNode; } /** * Constructs ActionRequestValue using default priority and 0 as responsibility * * @param actionRequested * @param recipient * @param description * @param ignorePrevious * * @return ActionRequestValue */ public ActionRequestValue createActionRequest(String actionRequested, Recipient recipient, String description, Boolean ignorePrevious, String annotation) { return createActionRequest(actionRequested, new Integer(0), recipient, description, KEWConstants.MACHINE_GENERATED_RESPONSIBILITY_ID, ignorePrevious, annotation); } public ActionRequestValue createActionRequest(String actionRequested, Integer priority, Recipient recipient, String description, Long responsibilityId, Boolean ignorePrevious, String annotation) { return createActionRequest(actionRequested, priority, recipient, description, responsibilityId, ignorePrevious, null, null, 
annotation); } public ActionRequestValue createActionRequest(String actionRequested, Integer priority, Recipient recipient, String description, Long responsibilityId, Boolean ignorePrevious, String approvePolicy, Long ruleId, String annotation) { return createActionRequest(actionRequested, priority, recipient, description, responsibilityId, ignorePrevious, null, null, annotation, null); } public ActionRequestValue createActionRequest(String actionRequested, Integer priority, Recipient recipient, String description, Long responsibilityId, Boolean ignorePrevious, String approvePolicy, Long ruleId, String annotation, String requestLabel) { ActionRequestValue actionRequest = new ActionRequestValue(); actionRequest.setActionRequested(actionRequested); actionRequest.setDocVersion(document.getDocVersion()); actionRequest.setPriority(priority); actionRequest.setRouteHeader(document); actionRequest.setRouteHeaderId(document.getRouteHeaderId()); actionRequest.setRouteLevel(document.getDocRouteLevel()); actionRequest.setNodeInstance(routeNode); actionRequest.setResponsibilityId(responsibilityId); actionRequest.setResponsibilityDesc(description); actionRequest.setApprovePolicy(approvePolicy); actionRequest.setIgnorePrevAction(ignorePrevious); actionRequest.setRuleBaseValuesId(ruleId); actionRequest.setAnnotation(annotation); actionRequest.setRequestLabel(requestLabel); setDefaultProperties(actionRequest); resolveRecipient(actionRequest, recipient); return actionRequest; } public ActionRequestValue createBlankActionRequest() { ActionRequestValue request = new ActionRequestValue(); request.setRouteHeader(document); request.setNodeInstance(routeNode); return request; } public ActionRequestValue createNotificationRequest(String actionRequestCode, KimPrincipal principal, String reasonActionCode, KimPrincipal reasonActionUser, String responsibilityDesc) { ActionRequestValue request = createActionRequest(actionRequestCode, new KimPrincipalRecipient(principal), responsibilityDesc, 
Boolean.TRUE, null); String annotation = generateNotificationAnnotation(reasonActionUser, actionRequestCode, reasonActionCode, request); request.setAnnotation(annotation); return request; } //unify these 2 methods if possible public List generateNotifications(List requests, KimPrincipal principal, Recipient delegator, String notificationRequestCode, String actionTakenCode) { String groupName = Utilities.getKNSParameterValue(KEWConstants.KEW_NAMESPACE, KNSConstants.DetailTypes.WORKGROUP_DETAIL_TYPE, KEWConstants.NOTIFICATION_EXCLUDED_USERS_WORKGROUP_NAME_IND); KimGroup notifyExclusionWorkgroup = getIdentityManagementService().getGroupByName(Utilities.parseGroupNamespaceCode(groupName), Utilities.parseGroupName(groupName)); return generateNotifications(null, getActionRequestService().getRootRequests(requests), principal, delegator, notificationRequestCode, actionTakenCode, notifyExclusionWorkgroup); } private List<ActionRequestValue> generateNotifications(ActionRequestValue parentRequest, List requests, KimPrincipal principal, Recipient delegator, String notificationRequestCode, String actionTakenCode, KimGroup notifyExclusionWorkgroup) { List<ActionRequestValue> notificationRequests = new ArrayList<ActionRequestValue>(); for (Iterator iter = requests.iterator(); iter.hasNext();) { ActionRequestValue actionRequest = (ActionRequestValue) iter.next(); if (!(actionRequest.isRecipientRoutedRequest(principal.getPrincipalId()) || actionRequest.isRecipientRoutedRequest(delegator))) { // skip user requests to system users if( (notifyExclusionWorkgroup != null) && (isRecipientInGroup(notifyExclusionWorkgroup, actionRequest.getRecipient()))) { continue; } ActionRequestValue notificationRequest = createNotificationRequest(actionRequest, principal, notificationRequestCode, actionTakenCode); notificationRequests.add(notificationRequest); if (parentRequest != null) { notificationRequest.setParentActionRequest(parentRequest); 
parentRequest.getChildrenRequests().add(notificationRequest); } notificationRequests.addAll(generateNotifications(notificationRequest, actionRequest.getChildrenRequests(), principal, delegator, notificationRequestCode, actionTakenCode, notifyExclusionWorkgroup)); } } return notificationRequests; } private boolean isRecipientInGroup(KimGroup group, Recipient recipient) { boolean isMember = false; if(recipient instanceof KimPrincipalRecipient) { String principalId = ((KimPrincipalRecipient) recipient).getPrincipalId(); String groupId = group.getGroupId(); isMember = getIdentityManagementService().isMemberOfGroup(principalId, groupId); } else if (recipient instanceof KimGroupRecipient) { String kimRecipientId = ((KimGroupRecipient) recipient).getGroup().getGroupId(); isMember = getIdentityManagementService().isGroupMemberOfGroup(kimRecipientId, group.getGroupId() ); } return isMember; } private ActionRequestValue createNotificationRequest(ActionRequestValue actionRequest, KimPrincipal reasonPrincipal, String notificationRequestCode, String actionTakenCode) { String annotation = generateNotificationAnnotation(reasonPrincipal, notificationRequestCode, actionTakenCode, actionRequest); ActionRequestValue request = createActionRequest(notificationRequestCode, actionRequest.getPriority(), actionRequest.getRecipient(), actionRequest.getResponsibilityDesc(), KEWConstants.MACHINE_GENERATED_RESPONSIBILITY_ID, Boolean.TRUE, annotation); request.setDocVersion(actionRequest.getDocVersion()); request.setApprovePolicy(actionRequest.getApprovePolicy()); request.setRoleName(actionRequest.getRoleName()); request.setQualifiedRoleName(actionRequest.getQualifiedRoleName()); request.setQualifiedRoleNameLabel(actionRequest.getQualifiedRoleNameLabel()); request.setDelegationType(actionRequest.getDelegationType()); return request; } private void setDefaultProperties(ActionRequestValue actionRequest) { if (actionRequest.getApprovePolicy() == null) { 
actionRequest.setApprovePolicy(KEWConstants.APPROVE_POLICY_FIRST_APPROVE); } actionRequest.setCreateDate(new Timestamp(System.currentTimeMillis())); actionRequest.setCurrentIndicator(Boolean.TRUE); if (actionRequest.getIgnorePrevAction() == null) { actionRequest.setIgnorePrevAction(Boolean.FALSE); } if (routeNode != null) { actionRequest.setNodeInstance(routeNode); } actionRequest.setJrfVerNbr(new Integer(0)); actionRequest.setStatus(KEWConstants.ACTION_REQUEST_INITIALIZED); actionRequest.setRouteHeader(document); } private static void resolveRecipient(ActionRequestValue actionRequest, Recipient recipient) { if (recipient instanceof KimPrincipalRecipient) { actionRequest.setRecipientTypeCd(KEWConstants.ACTION_REQUEST_USER_RECIPIENT_CD); actionRequest.setPrincipalId(((KimPrincipalRecipient)recipient).getPrincipal().getPrincipalId()); } else if (recipient instanceof KimGroupRecipient) { KimGroupRecipient kimGroupRecipient = (KimGroupRecipient)recipient; actionRequest.setRecipientTypeCd(KEWConstants.ACTION_REQUEST_GROUP_RECIPIENT_CD); actionRequest.setGroupId(kimGroupRecipient.getGroup().getGroupId()); } else if (recipient instanceof RoleRecipient){ RoleRecipient role = (RoleRecipient)recipient; actionRequest.setRecipientTypeCd(KEWConstants.ACTION_REQUEST_ROLE_RECIPIENT_CD); actionRequest.setRoleName(role.getRoleName()); actionRequest.setQualifiedRoleName(role.getQualifiedRoleName()); ResolvedQualifiedRole qualifiedRole = role.getResolvedQualifiedRole(); if (qualifiedRole != null) { actionRequest.setAnnotation(qualifiedRole.getAnnotation() == null ? 
"" : qualifiedRole.getAnnotation()); actionRequest.setQualifiedRoleNameLabel(qualifiedRole.getQualifiedRoleLabel()); } Recipient targetRecipient = role.getTarget(); if (role.getTarget() != null) { if (targetRecipient instanceof RoleRecipient) { throw new WorkflowRuntimeException("Role Cannot Target a role problem activating request for document " + actionRequest.getRouteHeader().getRouteHeaderId()); } resolveRecipient(actionRequest, role.getTarget()); } } else if (recipient instanceof KimRoleRecipient) { KimRoleRecipient roleRecipient = (KimRoleRecipient)recipient; actionRequest.setRecipientTypeCd(KEWConstants.ACTION_REQUEST_ROLE_RECIPIENT_CD); actionRequest.setRoleName(roleRecipient.getResponsibilities().get(0).getRoleId()); actionRequest.setQualifiedRoleName(roleRecipient.getResponsibilities().get(0).getResponsibilityName()); // what about qualified role name label? // actionRequest.setAnnotation(roleRecipient.getResponsibilities().get(0).getResponsibilityName()); Recipient targetRecipient = roleRecipient.getTarget(); if (targetRecipient != null) { if (targetRecipient instanceof RoleRecipient) { throw new WorkflowRuntimeException("Role Cannot Target a role problem activating request for document " + actionRequest.getRouteHeader().getRouteHeaderId()); } resolveRecipient(actionRequest, roleRecipient.getTarget()); } } } /** * Creates a root Role Request * @param role * @param actionRequested * @param approvePolicy * @param priority * @param responsibilityId * @param ignorePrevious * @param description * @return the created root role request */ public ActionRequestValue addRoleRequest(RoleRecipient role, String actionRequested, String approvePolicy, Integer priority, Long responsibilityId, Boolean ignorePrevious, String description, Long ruleId) { ActionRequestValue requestGraph = createActionRequest(actionRequested, priority, role, description, responsibilityId, ignorePrevious, approvePolicy, ruleId, null); if (role != null && role.getResolvedQualifiedRole() != null 
&& role.getResolvedQualifiedRole().getRecipients() != null) { int legitimateTargets = 0; for (Iterator iter = role.getResolvedQualifiedRole().getRecipients().iterator(); iter.hasNext();) { Id recipientId = (Id) iter.next(); if (recipientId.isEmpty()) { throw new WorkflowRuntimeException("Failed to resolve id of type " + recipientId.getClass().getName() + " returned from role '" + role.getRoleName() + "'. Id returned contained a null or empty value."); } if (recipientId instanceof UserId) { KimPrincipal principal = getIdentityHelperService().getPrincipal((UserId)recipientId); role.setTarget(new KimPrincipalRecipient(principal)); } else if (recipientId instanceof GroupId){ role.setTarget(new KimGroupRecipient(getIdentityHelperService().getGroup((GroupId) recipientId))); } else { throw new WorkflowRuntimeException("Could not process the given type of id: " + recipientId.getClass()); } if (role.getTarget() != null) { legitimateTargets++; ActionRequestValue request = createActionRequest(actionRequested, priority, role, description, responsibilityId, ignorePrevious, null, ruleId, null); request.setParentActionRequest(requestGraph); requestGraph.getChildrenRequests().add(request); } } if (legitimateTargets == 0) { LOG.warn("Role did not yield any legitimate recipients"); } } else { LOG.warn("Didn't create action requests for action request description '" + description + "' because of null role or null part of role object graph."); } requestGraphs.add(requestGraph); return requestGraph; } /** * Generates an ActionRequest graph for the given KIM Responsibilities. This graph includes any associated delegations. 
*/ public ActionRequestValue addRoleResponsibilityRequest(List<ResponsibilityActionInfo> responsibilities, String approvePolicy) { if (responsibilities == null || responsibilities.isEmpty()) { LOG.warn("Didn't create action requests for action request description because no responsibilities were defined."); return null; } // it's assumed the that all in the list have the same action type code, priority number, etc. String actionTypeCode = responsibilities.get(0).getActionTypeCode(); Integer priority = responsibilities.get(0).getPriorityNumber(); boolean ignorePrevious = responsibilities.get(0).isIgnorePrevious(); KimRoleRecipient roleRecipient = new KimRoleRecipient(responsibilities); ActionRequestValue requestGraph = createActionRequest( actionTypeCode, priority, roleRecipient, "", // description KEWConstants.MACHINE_GENERATED_RESPONSIBILITY_ID, ignorePrevious, approvePolicy, null, // ruleId null );// annotation StringBuffer parentAnnotation = new StringBuffer(); for (ResponsibilityActionInfo responsibility : responsibilities) { if ( LOG.isDebugEnabled() ) { LOG.debug( "Processing Responsibility for action request: " + responsibility ); } // KFSMI-2381 - pull information from KIM to populate annotation StringBuffer annotation = new StringBuffer(); KimRoleInfo role = getRoleManagementService().getRole(responsibility.getRoleId()); annotation.append( role.getNamespaceCode() ).append( ' ' ).append( role.getRoleName() ); AttributeSet qualifier = responsibility.getQualifier(); if ( qualifier != null ) { for ( String key : qualifier.keySet() ) { // annotation.append( '\n' ); // annotation.append( key ).append( '=' ).append( qualifier.get(key) ); annotation.append( qualifier.get( key ) ).append( ' ' ); } } if (responsibility.getPrincipalId() != null) { roleRecipient.setTarget(new KimPrincipalRecipient(responsibility.getPrincipalId())); } else if (responsibility.getGroupId() != null) { roleRecipient.setTarget(new KimGroupRecipient(responsibility.getGroupId())); } else { 
throw new RiceRuntimeException("Failed to identify a group or principal on the given ResponsibilityResolutionInfo."); } ActionRequestValue request = createActionRequest( responsibility.getActionTypeCode(), responsibility.getPriorityNumber(), roleRecipient, "", // description new Long(responsibility.getResponsibilityId()), responsibility.isIgnorePrevious(), approvePolicy, null, // ruleId annotation.toString()); request.setParentActionRequest(requestGraph); generateRoleResponsibilityDelegationRequests(responsibility, request); requestGraph.getChildrenRequests().add(request); parentAnnotation.append( annotation ).append( '/' ); } requestGraph.setAnnotation( parentAnnotation.toString() ); requestGraphs.add(requestGraph); return requestGraph; } private void generateRoleResponsibilityDelegationRequests(ResponsibilityActionInfo responsibility, ActionRequestValue parentRequest) { List<DelegateInfo> delegates = responsibility.getDelegates(); for (DelegateInfo delegate : delegates) { Recipient recipient = null; boolean isPrincipal = delegate.getMemberTypeCode().equals(KEWConstants.ACTION_REQUEST_USER_RECIPIENT_CD); boolean isGroup = delegate.getMemberTypeCode().equals(KEWConstants.ACTION_REQUEST_GROUP_RECIPIENT_CD); if (isPrincipal) { recipient = new KimPrincipalRecipient(delegate.getMemberId()); } else if (isGroup) { recipient = new KimGroupRecipient(delegate.getMemberId()); } else { throw new RiceRuntimeException("Invalid DelegateInfo memberTypeCode encountered, was '" + delegate.getMemberTypeCode() + "'"); } String responsibilityDescription = generateRoleResponsibilityDelegateDescription(delegate, isPrincipal, isGroup); addDelegationRequest(parentRequest, recipient, new Long(delegate.getDelegationId()), parentRequest.getIgnorePrevAction(), delegate.getDelegationTypeCode(), responsibilityDescription, null); } } private String generateRoleResponsibilityDelegateDescription(DelegateInfo delegate, boolean isPrincipal, boolean isGroup) { String responsibilityDescription = 
"Delegation generated from delegation id " + delegate.getDelegationId() + " for "; if (isPrincipal) { responsibilityDescription += "principal "; } else if (isGroup) { responsibilityDescription += "group "; } responsibilityDescription += "'" + delegate.getMemberId() + "'"; return responsibilityDescription; } public ActionRequestValue addDelegationRoleRequest(ActionRequestValue parentRequest, String approvePolicy, RoleRecipient role, Long responsibilityId, Boolean ignorePrevious, String delegationType, String description, Long ruleId) { Recipient parentRecipient = parentRequest.getRecipient(); if (parentRecipient instanceof RoleRecipient) { throw new WorkflowRuntimeException("Cannot delegate on Role Request. It must be a request to a person or workgroup, although that request may be in a role"); } if (! relatedToRoot(parentRequest)) { throw new WorkflowRuntimeException("The parent request is not related to any request managed by this factory"); } ActionRequestValue delegationRoleRequest = createActionRequest(parentRequest.getActionRequested(), parentRequest.getPriority(), role, description, responsibilityId, ignorePrevious, approvePolicy, ruleId, null); delegationRoleRequest.setDelegationType(delegationType); int count = 0; for (Iterator iter = role.getResolvedQualifiedRole().getRecipients().iterator(); iter.hasNext(); count++) { //repeat of createRoleRequest code Id recipientId = (Id) iter.next(); if (recipientId.isEmpty()) { throw new WorkflowRuntimeException("Failed to resolve id of type " + recipientId.getClass().getName() + " returned from role '" + role.getRoleName() + "'. 
Id returned contained a null or empty value."); } if (recipientId instanceof UserId) { role.setTarget(new KimPrincipalRecipient(getIdentityHelperService().getPrincipal((UserId) recipientId))); } else if (recipientId instanceof GroupId) { role.setTarget(new KimGroupRecipient(getIdentityHelperService().getGroup((GroupId) recipientId))); } else { throw new WorkflowRuntimeException("Could not process the given type of id: " + recipientId.getClass()); } ActionRequestValue request = createActionRequest(parentRequest.getActionRequested(), parentRequest.getPriority(), role, description, responsibilityId, ignorePrevious, null, ruleId, null); request.setDelegationType(delegationType); //end repeat request.setParentActionRequest(delegationRoleRequest); delegationRoleRequest.getChildrenRequests().add(request); } //put this mini graph in the larger graph if (count > 0) { parentRequest.getChildrenRequests().add(delegationRoleRequest); delegationRoleRequest.setParentActionRequest(parentRequest); } return delegationRoleRequest; } public ActionRequestValue addDelegationRequest(ActionRequestValue parentRequest, Recipient recipient, Long responsibilityId, Boolean ignorePrevious, String delegationType, String description, Long ruleId) { if (! 
relatedToRoot(parentRequest)) { throw new WorkflowRuntimeException("The parent request is not related to any request managed by this factory"); } ActionRequestValue delegationRequest = createActionRequest(parentRequest.getActionRequested(), parentRequest.getPriority(), recipient, description, responsibilityId, ignorePrevious, null, ruleId, null); delegationRequest.setDelegationType(delegationType); parentRequest.getChildrenRequests().add(delegationRequest); delegationRequest.setParentActionRequest(parentRequest); return delegationRequest; } //could probably base behavior off of recipient type public ActionRequestValue addRootActionRequest(String actionRequested, Integer priority, Recipient recipient, String description, Long responsibilityId, Boolean ignorePrevious, String approvePolicy, Long ruleId) { ActionRequestValue requestGraph = createActionRequest(actionRequested, priority, recipient, description, responsibilityId, ignorePrevious, approvePolicy, ruleId, null); requestGraphs.add(requestGraph); return requestGraph; } //return true if requestGraph (root) is in this requests' parents public boolean relatedToRoot(ActionRequestValue request) { int i = 0; while(i < 3) { if (requestGraphs.contains(request)) { return true; } else if (request == null) { return false; } i++; request = request.getParentActionRequest(); } return false; } public List getRequestGraphs() { //clean up all the trailing role requests with no children - requestGraphs.removeAll(cleanUpChildren(requestGraphs)); return requestGraphs; } private Collection cleanUpChildren(Collection children) { Collection requestsToRemove = new ArrayList(); for (Iterator iter = children.iterator(); iter.hasNext();) { ActionRequestValue request = (ActionRequestValue)iter.next(); if (request.isRoleRequest()) { if (request.getChildrenRequests().isEmpty()) { requestsToRemove.add(request); } else { Collection childRequestsToRemove = cleanUpChildren(request.getChildrenRequests()); 
request.getChildrenRequests().removeAll(childRequestsToRemove); } } } return requestsToRemove; } private String generateNotificationAnnotation(KimPrincipal principal, String notificationRequestCode, String actionTakenCode, ActionRequestValue request) { String notification = "Action " + CodeTranslator.getActionRequestLabel(notificationRequestCode) + " generated by Workflow because " + principal.getPrincipalName() + " took action " + CodeTranslator.getActionTakenLabel(actionTakenCode); if (request.getResponsibilityId() != null && request.getResponsibilityId().longValue() != 0) { notification += " Responsibility " + request.getResponsibilityId(); } if (request.getRuleBaseValuesId() != null) { notification += " Rule Id " + request.getRuleBaseValuesId(); } return notification; } protected static ActionRequestService getActionRequestService() { if ( actionRequestService == null ) { actionRequestService = KEWServiceLocator.getActionRequestService(); } return actionRequestService; } /** * @return the roleManagementService */ protected static RoleManagementService getRoleManagementService() { if ( roleManagementService == null ) { roleManagementService = KIMServiceLocator.getRoleManagementService(); } return roleManagementService; } /** * @return the identityHelperService */ protected static IdentityHelperService getIdentityHelperService() { if ( identityHelperService == null ) { identityHelperService = KEWServiceLocator.getIdentityHelperService(); } return identityHelperService; } /** * @return the identityManagementService */ protected static IdentityManagementService getIdentityManagementService() { if ( identityManagementService == null ) { identityManagementService = KIMServiceLocator.getIdentityManagementService(); } return identityManagementService; } }
impl/src/main/java/org/kuali/rice/kew/actionrequest/ActionRequestFactory.java
/* * Copyright 2005-2006 The Kuali Foundation. * * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kew.actionrequest; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import org.apache.log4j.Logger; import org.kuali.rice.core.exception.RiceRuntimeException; import org.kuali.rice.kew.actionrequest.service.ActionRequestService; import org.kuali.rice.kew.engine.node.RouteNodeInstance; import org.kuali.rice.kew.exception.WorkflowRuntimeException; import org.kuali.rice.kew.identity.Id; import org.kuali.rice.kew.identity.service.IdentityHelperService; import org.kuali.rice.kew.role.KimRoleRecipient; import org.kuali.rice.kew.routeheader.DocumentRouteHeaderValue; import org.kuali.rice.kew.rule.ResolvedQualifiedRole; import org.kuali.rice.kew.service.KEWServiceLocator; import org.kuali.rice.kew.user.RoleRecipient; import org.kuali.rice.kew.user.UserId; import org.kuali.rice.kew.util.CodeTranslator; import org.kuali.rice.kew.util.KEWConstants; import org.kuali.rice.kew.util.Utilities; import org.kuali.rice.kew.workgroup.GroupId; import org.kuali.rice.kim.bo.entity.KimPrincipal; import org.kuali.rice.kim.bo.group.KimGroup; import org.kuali.rice.kim.bo.role.dto.DelegateInfo; import org.kuali.rice.kim.bo.role.dto.KimRoleInfo; import org.kuali.rice.kim.bo.role.dto.ResponsibilityActionInfo; import org.kuali.rice.kim.bo.types.dto.AttributeSet; import 
org.kuali.rice.kim.service.IdentityManagementService; import org.kuali.rice.kim.service.KIMServiceLocator; import org.kuali.rice.kim.service.RoleManagementService; import org.kuali.rice.kim.service.support.KimRoleTypeService; import org.kuali.rice.kns.util.KNSConstants; /** * A factory to aid in creating the ever-so-gnarly ActionRequestValue object. * * @author Kuali Rice Team ([email protected]) */ public class ActionRequestFactory { private static final Logger LOG = Logger.getLogger(ActionRequestFactory.class); private static RoleManagementService roleManagementService; private static IdentityHelperService identityHelperService; private static IdentityManagementService identityManagementService; private static ActionRequestService actionRequestService; private DocumentRouteHeaderValue document; private RouteNodeInstance routeNode; private List<ActionRequestValue> requestGraphs = new ArrayList<ActionRequestValue>(); public ActionRequestFactory() { } public ActionRequestFactory(DocumentRouteHeaderValue document) { this.document = document; } public ActionRequestFactory(DocumentRouteHeaderValue document, RouteNodeInstance routeNode) { this.document = document; this.routeNode = routeNode; } /** * Constructs ActionRequestValue using default priority and 0 as responsibility * * @param actionRequested * @param recipient * @param description * @param ignorePrevious * * @return ActionRequestValue */ public ActionRequestValue createActionRequest(String actionRequested, Recipient recipient, String description, Boolean ignorePrevious, String annotation) { return createActionRequest(actionRequested, new Integer(0), recipient, description, KEWConstants.MACHINE_GENERATED_RESPONSIBILITY_ID, ignorePrevious, annotation); } public ActionRequestValue createActionRequest(String actionRequested, Integer priority, Recipient recipient, String description, Long responsibilityId, Boolean ignorePrevious, String annotation) { return createActionRequest(actionRequested, priority, recipient, 
description, responsibilityId, ignorePrevious, null, null, annotation); } public ActionRequestValue createActionRequest(String actionRequested, Integer priority, Recipient recipient, String description, Long responsibilityId, Boolean ignorePrevious, String approvePolicy, Long ruleId, String annotation) { return createActionRequest(actionRequested, priority, recipient, description, responsibilityId, ignorePrevious, null, null, annotation, null); } public ActionRequestValue createActionRequest(String actionRequested, Integer priority, Recipient recipient, String description, Long responsibilityId, Boolean ignorePrevious, String approvePolicy, Long ruleId, String annotation, String requestLabel) { ActionRequestValue actionRequest = new ActionRequestValue(); actionRequest.setActionRequested(actionRequested); actionRequest.setDocVersion(document.getDocVersion()); actionRequest.setPriority(priority); actionRequest.setRouteHeader(document); actionRequest.setRouteHeaderId(document.getRouteHeaderId()); actionRequest.setRouteLevel(document.getDocRouteLevel()); actionRequest.setNodeInstance(routeNode); actionRequest.setResponsibilityId(responsibilityId); actionRequest.setResponsibilityDesc(description); actionRequest.setApprovePolicy(approvePolicy); actionRequest.setIgnorePrevAction(ignorePrevious); actionRequest.setRuleBaseValuesId(ruleId); actionRequest.setAnnotation(annotation); actionRequest.setRequestLabel(requestLabel); setDefaultProperties(actionRequest); resolveRecipient(actionRequest, recipient); return actionRequest; } public ActionRequestValue createBlankActionRequest() { ActionRequestValue request = new ActionRequestValue(); request.setRouteHeader(document); request.setNodeInstance(routeNode); return request; } public ActionRequestValue createNotificationRequest(String actionRequestCode, KimPrincipal principal, String reasonActionCode, KimPrincipal reasonActionUser, String responsibilityDesc) { ActionRequestValue request = createActionRequest(actionRequestCode, 
new KimPrincipalRecipient(principal), responsibilityDesc, Boolean.TRUE, null); String annotation = generateNotificationAnnotation(reasonActionUser, actionRequestCode, reasonActionCode, request); request.setAnnotation(annotation); return request; } //unify these 2 methods if possible public List generateNotifications(List requests, KimPrincipal principal, Recipient delegator, String notificationRequestCode, String actionTakenCode) { String groupName = Utilities.getKNSParameterValue(KEWConstants.KEW_NAMESPACE, KNSConstants.DetailTypes.WORKGROUP_DETAIL_TYPE, KEWConstants.NOTIFICATION_EXCLUDED_USERS_WORKGROUP_NAME_IND); KimGroup notifyExclusionWorkgroup = getIdentityManagementService().getGroupByName(Utilities.parseGroupNamespaceCode(groupName), Utilities.parseGroupName(groupName)); return generateNotifications(null, getActionRequestService().getRootRequests(requests), principal, delegator, notificationRequestCode, actionTakenCode, notifyExclusionWorkgroup); } private List<ActionRequestValue> generateNotifications(ActionRequestValue parentRequest, List requests, KimPrincipal principal, Recipient delegator, String notificationRequestCode, String actionTakenCode, KimGroup notifyExclusionWorkgroup) { List<ActionRequestValue> notificationRequests = new ArrayList<ActionRequestValue>(); for (Iterator iter = requests.iterator(); iter.hasNext();) { ActionRequestValue actionRequest = (ActionRequestValue) iter.next(); if (!(actionRequest.isRecipientRoutedRequest(principal.getPrincipalId()) || actionRequest.isRecipientRoutedRequest(delegator))) { // skip user requests to system users if( (notifyExclusionWorkgroup != null) && (isRecipientInGroup(notifyExclusionWorkgroup, actionRequest.getRecipient()))) { continue; } ActionRequestValue notificationRequest = createNotificationRequest(actionRequest, principal, notificationRequestCode, actionTakenCode); notificationRequests.add(notificationRequest); if (parentRequest != null) { 
notificationRequest.setParentActionRequest(parentRequest); parentRequest.getChildrenRequests().add(notificationRequest); } notificationRequests.addAll(generateNotifications(notificationRequest, actionRequest.getChildrenRequests(), principal, delegator, notificationRequestCode, actionTakenCode, notifyExclusionWorkgroup)); } } return notificationRequests; } private boolean isRecipientInGroup(KimGroup group, Recipient recipient) { boolean isMember = false; if(recipient instanceof KimPrincipalRecipient) { String principalId = ((KimPrincipalRecipient) recipient).getPrincipalId(); String groupId = group.getGroupId(); isMember = getIdentityManagementService().isMemberOfGroup(principalId, groupId); } else if (recipient instanceof KimGroupRecipient) { String kimRecipientId = ((KimGroupRecipient) recipient).getGroup().getGroupId(); isMember = getIdentityManagementService().isGroupMemberOfGroup(kimRecipientId, group.getGroupId() ); } return isMember; } private ActionRequestValue createNotificationRequest(ActionRequestValue actionRequest, KimPrincipal reasonPrincipal, String notificationRequestCode, String actionTakenCode) { String annotation = generateNotificationAnnotation(reasonPrincipal, notificationRequestCode, actionTakenCode, actionRequest); ActionRequestValue request = createActionRequest(notificationRequestCode, actionRequest.getPriority(), actionRequest.getRecipient(), actionRequest.getResponsibilityDesc(), KEWConstants.MACHINE_GENERATED_RESPONSIBILITY_ID, Boolean.TRUE, annotation); request.setDocVersion(actionRequest.getDocVersion()); request.setApprovePolicy(actionRequest.getApprovePolicy()); request.setRoleName(actionRequest.getRoleName()); request.setQualifiedRoleName(actionRequest.getQualifiedRoleName()); request.setQualifiedRoleNameLabel(actionRequest.getQualifiedRoleNameLabel()); request.setDelegationType(actionRequest.getDelegationType()); return request; } private void setDefaultProperties(ActionRequestValue actionRequest) { if 
(actionRequest.getApprovePolicy() == null) { actionRequest.setApprovePolicy(KEWConstants.APPROVE_POLICY_FIRST_APPROVE); } actionRequest.setCreateDate(new Timestamp(System.currentTimeMillis())); actionRequest.setCurrentIndicator(Boolean.TRUE); if (actionRequest.getIgnorePrevAction() == null) { actionRequest.setIgnorePrevAction(Boolean.FALSE); } if (routeNode != null) { actionRequest.setNodeInstance(routeNode); } actionRequest.setJrfVerNbr(new Integer(0)); actionRequest.setStatus(KEWConstants.ACTION_REQUEST_INITIALIZED); actionRequest.setRouteHeader(document); } private static void resolveRecipient(ActionRequestValue actionRequest, Recipient recipient) { if (recipient instanceof KimPrincipalRecipient) { actionRequest.setRecipientTypeCd(KEWConstants.ACTION_REQUEST_USER_RECIPIENT_CD); actionRequest.setPrincipalId(((KimPrincipalRecipient)recipient).getPrincipal().getPrincipalId()); } else if (recipient instanceof KimGroupRecipient) { KimGroupRecipient kimGroupRecipient = (KimGroupRecipient)recipient; actionRequest.setRecipientTypeCd(KEWConstants.ACTION_REQUEST_GROUP_RECIPIENT_CD); actionRequest.setGroupId(kimGroupRecipient.getGroup().getGroupId()); } else if (recipient instanceof RoleRecipient){ RoleRecipient role = (RoleRecipient)recipient; actionRequest.setRecipientTypeCd(KEWConstants.ACTION_REQUEST_ROLE_RECIPIENT_CD); actionRequest.setRoleName(role.getRoleName()); actionRequest.setQualifiedRoleName(role.getQualifiedRoleName()); ResolvedQualifiedRole qualifiedRole = role.getResolvedQualifiedRole(); if (qualifiedRole != null) { actionRequest.setAnnotation(qualifiedRole.getAnnotation() == null ? 
"" : qualifiedRole.getAnnotation()); actionRequest.setQualifiedRoleNameLabel(qualifiedRole.getQualifiedRoleLabel()); } Recipient targetRecipient = role.getTarget(); if (role.getTarget() != null) { if (targetRecipient instanceof RoleRecipient) { throw new WorkflowRuntimeException("Role Cannot Target a role problem activating request for document " + actionRequest.getRouteHeader().getRouteHeaderId()); } resolveRecipient(actionRequest, role.getTarget()); } } else if (recipient instanceof KimRoleRecipient) { KimRoleRecipient roleRecipient = (KimRoleRecipient)recipient; actionRequest.setRecipientTypeCd(KEWConstants.ACTION_REQUEST_ROLE_RECIPIENT_CD); actionRequest.setRoleName(roleRecipient.getResponsibilities().get(0).getRoleId()); actionRequest.setQualifiedRoleName(roleRecipient.getResponsibilities().get(0).getResponsibilityName()); // what about qualified role name label? // actionRequest.setAnnotation(roleRecipient.getResponsibilities().get(0).getResponsibilityName()); Recipient targetRecipient = roleRecipient.getTarget(); if (targetRecipient != null) { if (targetRecipient instanceof RoleRecipient) { throw new WorkflowRuntimeException("Role Cannot Target a role problem activating request for document " + actionRequest.getRouteHeader().getRouteHeaderId()); } resolveRecipient(actionRequest, roleRecipient.getTarget()); } } } /** * Creates a root Role Request * @param role * @param actionRequested * @param approvePolicy * @param priority * @param responsibilityId * @param ignorePrevious * @param description * @return the created root role request */ public ActionRequestValue addRoleRequest(RoleRecipient role, String actionRequested, String approvePolicy, Integer priority, Long responsibilityId, Boolean ignorePrevious, String description, Long ruleId) { ActionRequestValue requestGraph = createActionRequest(actionRequested, priority, role, description, responsibilityId, ignorePrevious, approvePolicy, ruleId, null); if (role != null && role.getResolvedQualifiedRole() != null 
&& role.getResolvedQualifiedRole().getRecipients() != null) { int legitimateTargets = 0; for (Iterator iter = role.getResolvedQualifiedRole().getRecipients().iterator(); iter.hasNext();) { Id recipientId = (Id) iter.next(); if (recipientId.isEmpty()) { throw new WorkflowRuntimeException("Failed to resolve id of type " + recipientId.getClass().getName() + " returned from role '" + role.getRoleName() + "'. Id returned contained a null or empty value."); } if (recipientId instanceof UserId) { KimPrincipal principal = getIdentityHelperService().getPrincipal((UserId)recipientId); role.setTarget(new KimPrincipalRecipient(principal)); } else if (recipientId instanceof GroupId){ role.setTarget(new KimGroupRecipient(getIdentityHelperService().getGroup((GroupId) recipientId))); } else { throw new WorkflowRuntimeException("Could not process the given type of id: " + recipientId.getClass()); } if (role.getTarget() != null) { legitimateTargets++; ActionRequestValue request = createActionRequest(actionRequested, priority, role, description, responsibilityId, ignorePrevious, null, ruleId, null); request.setParentActionRequest(requestGraph); requestGraph.getChildrenRequests().add(request); } } if (legitimateTargets == 0) { LOG.warn("Role did not yield any legitimate recipients"); } } else { LOG.warn("Didn't create action requests for action request description '" + description + "' because of null role or null part of role object graph."); } requestGraphs.add(requestGraph); return requestGraph; } /** * Generates an ActionRequest graph for the given KIM Responsibilities. This graph includes any associated delegations. 
*/ public ActionRequestValue addRoleResponsibilityRequest(List<ResponsibilityActionInfo> responsibilities, String approvePolicy) { if (responsibilities == null || responsibilities.isEmpty()) { LOG.warn("Didn't create action requests for action request description because no responsibilities were defined."); return null; } // it's assumed the that all in the list have the same action type code, priority number, etc. String actionTypeCode = responsibilities.get(0).getActionTypeCode(); Integer priority = responsibilities.get(0).getPriorityNumber(); boolean ignorePrevious = responsibilities.get(0).isIgnorePrevious(); KimRoleRecipient roleRecipient = new KimRoleRecipient(responsibilities); ActionRequestValue requestGraph = createActionRequest( actionTypeCode, priority, roleRecipient, "", // description KEWConstants.MACHINE_GENERATED_RESPONSIBILITY_ID, ignorePrevious, approvePolicy, null, // ruleId null );// annotation StringBuffer parentAnnotation = new StringBuffer(); for (ResponsibilityActionInfo responsibility : responsibilities) { if ( LOG.isDebugEnabled() ) { LOG.debug( "Processing Responsibility for action request: " + responsibility ); } // KFSMI-2381 - pull information from KIM to populate annotation StringBuffer annotation = new StringBuffer(); KimRoleInfo role = getRoleManagementService().getRole(responsibility.getRoleId()); annotation.append( role.getNamespaceCode() ).append( ' ' ).append( role.getRoleName() ); AttributeSet qualifier = responsibility.getQualifier(); if ( qualifier != null ) { for ( String key : qualifier.keySet() ) { annotation.append( '\n' ); annotation.append( key ).append( '=' ).append( qualifier.get(key) ); } } if (responsibility.getPrincipalId() != null) { roleRecipient.setTarget(new KimPrincipalRecipient(responsibility.getPrincipalId())); } else if (responsibility.getGroupId() != null) { roleRecipient.setTarget(new KimGroupRecipient(responsibility.getGroupId())); } else { throw new RiceRuntimeException("Failed to identify a group or 
principal on the given ResponsibilityResolutionInfo."); } ActionRequestValue request = createActionRequest( responsibility.getActionTypeCode(), responsibility.getPriorityNumber(), roleRecipient, "", // description new Long(responsibility.getResponsibilityId()), responsibility.isIgnorePrevious(), approvePolicy, null, // ruleId annotation.toString()); request.setParentActionRequest(requestGraph); generateRoleResponsibilityDelegationRequests(responsibility, request); requestGraph.getChildrenRequests().add(request); parentAnnotation.append( annotation ); } requestGraph.setAnnotation( parentAnnotation.toString() ); requestGraphs.add(requestGraph); return requestGraph; } private void generateRoleResponsibilityDelegationRequests(ResponsibilityActionInfo responsibility, ActionRequestValue parentRequest) { List<DelegateInfo> delegates = responsibility.getDelegates(); for (DelegateInfo delegate : delegates) { Recipient recipient = null; boolean isPrincipal = delegate.getMemberTypeCode().equals(KEWConstants.ACTION_REQUEST_USER_RECIPIENT_CD); boolean isGroup = delegate.getMemberTypeCode().equals(KEWConstants.ACTION_REQUEST_GROUP_RECIPIENT_CD); if (isPrincipal) { recipient = new KimPrincipalRecipient(delegate.getMemberId()); } else if (isGroup) { recipient = new KimGroupRecipient(delegate.getMemberId()); } else { throw new RiceRuntimeException("Invalid DelegateInfo memberTypeCode encountered, was '" + delegate.getMemberTypeCode() + "'"); } String responsibilityDescription = generateRoleResponsibilityDelegateDescription(delegate, isPrincipal, isGroup); addDelegationRequest(parentRequest, recipient, new Long(delegate.getDelegationId()), parentRequest.getIgnorePrevAction(), delegate.getDelegationTypeCode(), responsibilityDescription, null); } } private String generateRoleResponsibilityDelegateDescription(DelegateInfo delegate, boolean isPrincipal, boolean isGroup) { String responsibilityDescription = "Delegation generated from delegation id " + delegate.getDelegationId() + " for 
"; if (isPrincipal) { responsibilityDescription += "principal "; } else if (isGroup) { responsibilityDescription += "group "; } responsibilityDescription += "'" + delegate.getMemberId() + "'"; return responsibilityDescription; } public ActionRequestValue addDelegationRoleRequest(ActionRequestValue parentRequest, String approvePolicy, RoleRecipient role, Long responsibilityId, Boolean ignorePrevious, String delegationType, String description, Long ruleId) { Recipient parentRecipient = parentRequest.getRecipient(); if (parentRecipient instanceof RoleRecipient) { throw new WorkflowRuntimeException("Cannot delegate on Role Request. It must be a request to a person or workgroup, although that request may be in a role"); } if (! relatedToRoot(parentRequest)) { throw new WorkflowRuntimeException("The parent request is not related to any request managed by this factory"); } ActionRequestValue delegationRoleRequest = createActionRequest(parentRequest.getActionRequested(), parentRequest.getPriority(), role, description, responsibilityId, ignorePrevious, approvePolicy, ruleId, null); delegationRoleRequest.setDelegationType(delegationType); int count = 0; for (Iterator iter = role.getResolvedQualifiedRole().getRecipients().iterator(); iter.hasNext(); count++) { //repeat of createRoleRequest code Id recipientId = (Id) iter.next(); if (recipientId.isEmpty()) { throw new WorkflowRuntimeException("Failed to resolve id of type " + recipientId.getClass().getName() + " returned from role '" + role.getRoleName() + "'. 
Id returned contained a null or empty value."); } if (recipientId instanceof UserId) { role.setTarget(new KimPrincipalRecipient(getIdentityHelperService().getPrincipal((UserId) recipientId))); } else if (recipientId instanceof GroupId) { role.setTarget(new KimGroupRecipient(getIdentityHelperService().getGroup((GroupId) recipientId))); } else { throw new WorkflowRuntimeException("Could not process the given type of id: " + recipientId.getClass()); } ActionRequestValue request = createActionRequest(parentRequest.getActionRequested(), parentRequest.getPriority(), role, description, responsibilityId, ignorePrevious, null, ruleId, null); request.setDelegationType(delegationType); //end repeat request.setParentActionRequest(delegationRoleRequest); delegationRoleRequest.getChildrenRequests().add(request); } //put this mini graph in the larger graph if (count > 0) { parentRequest.getChildrenRequests().add(delegationRoleRequest); delegationRoleRequest.setParentActionRequest(parentRequest); } return delegationRoleRequest; } public ActionRequestValue addDelegationRequest(ActionRequestValue parentRequest, Recipient recipient, Long responsibilityId, Boolean ignorePrevious, String delegationType, String description, Long ruleId) { if (! 
relatedToRoot(parentRequest)) { throw new WorkflowRuntimeException("The parent request is not related to any request managed by this factory"); } ActionRequestValue delegationRequest = createActionRequest(parentRequest.getActionRequested(), parentRequest.getPriority(), recipient, description, responsibilityId, ignorePrevious, null, ruleId, null); delegationRequest.setDelegationType(delegationType); parentRequest.getChildrenRequests().add(delegationRequest); delegationRequest.setParentActionRequest(parentRequest); return delegationRequest; } //could probably base behavior off of recipient type public ActionRequestValue addRootActionRequest(String actionRequested, Integer priority, Recipient recipient, String description, Long responsibilityId, Boolean ignorePrevious, String approvePolicy, Long ruleId) { ActionRequestValue requestGraph = createActionRequest(actionRequested, priority, recipient, description, responsibilityId, ignorePrevious, approvePolicy, ruleId, null); requestGraphs.add(requestGraph); return requestGraph; } //return true if requestGraph (root) is in this requests' parents public boolean relatedToRoot(ActionRequestValue request) { int i = 0; while(i < 3) { if (requestGraphs.contains(request)) { return true; } else if (request == null) { return false; } i++; request = request.getParentActionRequest(); } return false; } public List getRequestGraphs() { //clean up all the trailing role requests with no children - requestGraphs.removeAll(cleanUpChildren(requestGraphs)); return requestGraphs; } private Collection cleanUpChildren(Collection children) { Collection requestsToRemove = new ArrayList(); for (Iterator iter = children.iterator(); iter.hasNext();) { ActionRequestValue request = (ActionRequestValue)iter.next(); if (request.isRoleRequest()) { if (request.getChildrenRequests().isEmpty()) { requestsToRemove.add(request); } else { Collection childRequestsToRemove = cleanUpChildren(request.getChildrenRequests()); 
request.getChildrenRequests().removeAll(childRequestsToRemove); } } } return requestsToRemove; } private String generateNotificationAnnotation(KimPrincipal principal, String notificationRequestCode, String actionTakenCode, ActionRequestValue request) { String notification = "Action " + CodeTranslator.getActionRequestLabel(notificationRequestCode) + " generated by Workflow because " + principal.getPrincipalName() + " took action " + CodeTranslator.getActionTakenLabel(actionTakenCode); if (request.getResponsibilityId() != null && request.getResponsibilityId().longValue() != 0) { notification += " Responsibility " + request.getResponsibilityId(); } if (request.getRuleBaseValuesId() != null) { notification += " Rule Id " + request.getRuleBaseValuesId(); } return notification; } protected static ActionRequestService getActionRequestService() { if ( actionRequestService == null ) { actionRequestService = KEWServiceLocator.getActionRequestService(); } return actionRequestService; } /** * @return the roleManagementService */ protected static RoleManagementService getRoleManagementService() { if ( roleManagementService == null ) { roleManagementService = KIMServiceLocator.getRoleManagementService(); } return roleManagementService; } /** * @return the identityHelperService */ protected static IdentityHelperService getIdentityHelperService() { if ( identityHelperService == null ) { identityHelperService = KEWServiceLocator.getIdentityHelperService(); } return identityHelperService; } /** * @return the identityManagementService */ protected static IdentityManagementService getIdentityManagementService() { if ( identityManagementService == null ) { identityManagementService = KIMServiceLocator.getIdentityManagementService(); } return identityManagementService; } }
KFSMI-2381 - Updated annotations to not include the label per functional request.
impl/src/main/java/org/kuali/rice/kew/actionrequest/ActionRequestFactory.java
KFSMI-2381 - Updated annotations to not include the label per functional request.
<ide><path>mpl/src/main/java/org/kuali/rice/kew/actionrequest/ActionRequestFactory.java <ide> import org.kuali.rice.kim.service.IdentityManagementService; <ide> import org.kuali.rice.kim.service.KIMServiceLocator; <ide> import org.kuali.rice.kim.service.RoleManagementService; <del>import org.kuali.rice.kim.service.support.KimRoleTypeService; <ide> import org.kuali.rice.kns.util.KNSConstants; <ide> <ide> <ide> annotation.append( role.getNamespaceCode() ).append( ' ' ).append( role.getRoleName() ); <ide> AttributeSet qualifier = responsibility.getQualifier(); <ide> if ( qualifier != null ) { <del> for ( String key : qualifier.keySet() ) { <del> <del> annotation.append( '\n' ); <del> annotation.append( key ).append( '=' ).append( qualifier.get(key) ); <add> for ( String key : qualifier.keySet() ) { <add>// annotation.append( '\n' ); <add>// annotation.append( key ).append( '=' ).append( qualifier.get(key) ); <add> annotation.append( qualifier.get( key ) ).append( ' ' ); <ide> } <ide> } <ide> if (responsibility.getPrincipalId() != null) { <ide> request.setParentActionRequest(requestGraph); <ide> generateRoleResponsibilityDelegationRequests(responsibility, request); <ide> requestGraph.getChildrenRequests().add(request); <del> parentAnnotation.append( annotation ); <add> parentAnnotation.append( annotation ).append( '/' ); <ide> } <ide> requestGraph.setAnnotation( parentAnnotation.toString() ); <ide> requestGraphs.add(requestGraph);
Java
apache-2.0
2cac196e0a5e237a7278e108dd608d36df951807
0
arpost/aiw-i2b2-etl,eurekaclinical/aiw-i2b2-etl
/* * #%L * AIW i2b2 ETL * %% * Copyright (C) 2012 Emory University * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package edu.emory.cci.aiw.i2b2etl; import edu.emory.cci.aiw.i2b2etl.table.InvalidFactException; import edu.emory.cci.aiw.i2b2etl.table.FactHandler; import edu.emory.cci.aiw.i2b2etl.configuration.*; import edu.emory.cci.aiw.i2b2etl.configuration.ConceptsSection.FolderSpec; import edu.emory.cci.aiw.i2b2etl.metadata.Metadata; import edu.emory.cci.aiw.i2b2etl.metadata.InvalidConceptCodeException; import edu.emory.cci.aiw.i2b2etl.metadata.Concept; import edu.emory.cci.aiw.i2b2etl.metadata.OntologyBuildException; import edu.emory.cci.aiw.i2b2etl.configuration.DataSection.DataSpec; import edu.emory.cci.aiw.i2b2etl.metadata.*; import edu.emory.cci.aiw.i2b2etl.table.ConceptDimension; import edu.emory.cci.aiw.i2b2etl.table.PatientDimension; import edu.emory.cci.aiw.i2b2etl.table.ProviderDimension; import edu.emory.cci.aiw.i2b2etl.table.VisitDimension; import java.io.File; import java.sql.*; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.lang.ArrayUtils; import org.protempa.KnowledgeSource; import org.protempa.KnowledgeSourceReadException; import org.protempa.PropositionDefinition; import org.protempa.ReferenceDefinition; import org.protempa.proposition.Proposition; import org.protempa.proposition.TemporalProposition; import org.protempa.proposition.UniqueId; import org.protempa.query.Query; import 
org.protempa.query.handler.QueryResultsHandler; import org.protempa.query.handler.QueryResultsHandlerInitException; import org.protempa.query.handler.QueryResultsHandlerProcessingException; import org.protempa.query.handler.table.Link; import org.protempa.query.handler.table.Reference; /** * * @author Andrew Post */ public final class I2B2QueryResultsHandler implements QueryResultsHandler { private static final long serialVersionUID = -1503401944818776787L; private final File confFile; private final boolean inferPropositionIdsNeeded; private KnowledgeSource knowledgeSource; private ConfigurationReader configurationReader; private Metadata ontologyModel; private Connection dataSchemaConnection; private List<FactHandler> factHandlers; private Query query; public I2B2QueryResultsHandler(File confXML) { this(confXML, true); } public I2B2QueryResultsHandler(File confXML, boolean inferPropositionIdsNeeded) { if (confXML == null) { throw new IllegalArgumentException("confXML cannot be null"); } Logger logger = I2b2ETLUtil.logger(); this.confFile = confXML; logger.log(Level.FINE, String.format("Using configuration file: %s", this.confFile.getAbsolutePath())); this.inferPropositionIdsNeeded = inferPropositionIdsNeeded; } /** * Reads the configuration file passed into the constructor, in addition to * setting the knowledge source and query. This method is intended to be * called internally by Protempa. * * @param knowledgeSource the {@link KnowledgeSource}. Cannot be * <code>null</code>. * @param query the {@link Query}. Cannot be <code>null</code>. * @throws QueryResultsHandlerInitException if an error occurs reading the * configuration file. 
*/ @Override public void init(KnowledgeSource knowledgeSource, Query query) throws QueryResultsHandlerInitException { assert knowledgeSource != null : "knowledgeSource cannot be null"; assert query != null : "query cannot be null"; this.knowledgeSource = knowledgeSource; this.query = query; try { readConfiguration(); } catch (ConfigurationReadException ex) { throw new QueryResultsHandlerInitException( "Could not initialize query results handler for query " + query.getId(), ex); } } /** * Builds most of the concept tree, truncates the data tables, opens a * connection to the i2b2 project database, and does some other prep. This * method is called before the first call to{@link #handleQueryResult()}. * * @throws QueryResultsHandlerProcessingException */ @Override public void start() throws QueryResultsHandlerProcessingException { Logger logger = I2b2ETLUtil.logger(); try { mostlyBuildOntology(); truncateDataTables(); this.dataSchemaConnection = openDatabaseConnection("dataschema"); assembleFactHandlers(); logger.log(Level.INFO, "Populating observation facts table for query {0}", this.query.getId()); } catch (KnowledgeSourceReadException ex) { throw new QueryResultsHandlerProcessingException( "Error during query " + this.query.getId(), ex); } catch (InstantiationException ex) { throw new QueryResultsHandlerProcessingException( "Error during query " + this.query.getId(), ex); } catch (IllegalAccessException ex) { throw new QueryResultsHandlerProcessingException( "Error during query " + this.query.getId(), ex); } catch (OntologyBuildException ex) { throw new QueryResultsHandlerProcessingException( "Error during query " + this.query.getId(), ex); } catch (SQLException ex) { throw new QueryResultsHandlerProcessingException( "Error during query " + this.query.getId(), ex); } } private void assembleFactHandlers() throws IllegalAccessException, InstantiationException, KnowledgeSourceReadException { this.factHandlers = new ArrayList<FactHandler>(); DictionarySection 
dictSection = this.configurationReader.getDictionarySection(); String visitPropId = dictSection.get("visitDimension"); PropositionDefinition visitPropDef = this.knowledgeSource.readPropositionDefinition(visitPropId); DataSection dataSection = this.configurationReader.getDataSection(); for (DataSection.DataSpec obx : dataSection.getAll()) { PropositionDefinition[] propDefs; Link[] links; if (obx.referenceName != null) { links = new Link[]{new Reference(obx.referenceName)}; ReferenceDefinition refDef = visitPropDef.referenceDefinition(obx.referenceName); String[] propIds = refDef.getPropositionIds(); propDefs = new PropositionDefinition[propIds.length + 1]; propDefs[0] = visitPropDef; for (int i = 1; i < propDefs.length; i++) { propDefs[i] = this.knowledgeSource.readPropositionDefinition( propIds[i - 1]); assert propDefs[i] != null : "Invalid proposition id " + propIds[i - 1]; } } else { links = null; propDefs = new PropositionDefinition[]{visitPropDef}; } String[] potentialDerivedPropIdsArr = this.ontologyModel.extractDerived(propDefs); FactHandler factHandler = new FactHandler(links, obx.propertyName, obx.start, obx.finish, obx.units, potentialDerivedPropIdsArr, this.ontologyModel); this.factHandlers.add(factHandler); } } @Override public void handleQueryResult(String keyId, List<Proposition> propositions, Map<Proposition, List<Proposition>> forwardDerivations, Map<Proposition, List<Proposition>> backwardDerivations, Map<UniqueId, Proposition> references) throws QueryResultsHandlerProcessingException { DictionarySection dictSection = this.configurationReader.getDictionarySection(); String visitPropId = dictSection.get("visitDimension"); try { Set<Proposition> derivedPropositions = new HashSet<Proposition>(); List<Proposition> props = new ArrayList<Proposition>(); for (Proposition prop : propositions) { if (prop.getId().equals(visitPropId)) { props.add(prop); } } for (Proposition prop : props) { DataSection obxSection = this.configurationReader.getDataSection(); 
DataSpec providerFullNameSpec = obxSection.get(dictSection.get("providerFullName")); DataSpec providerFirstNameSpec = obxSection.get(dictSection.get("providerFirstName")); DataSpec providerMiddleNameSpec = obxSection.get(dictSection.get("providerMiddleName")); DataSpec providerLastNameSpec = obxSection.get(dictSection.get("providerLastName")); ProviderDimension provider = this.ontologyModel.addProviderIfNeeded(prop, providerFullNameSpec.referenceName, providerFullNameSpec.propertyName, providerFirstNameSpec.referenceName, providerFirstNameSpec.propertyName, providerMiddleNameSpec.referenceName, providerMiddleNameSpec.propertyName, providerLastNameSpec.referenceName, providerLastNameSpec.propertyName, references); PatientDimension pd; if ((pd = this.ontologyModel.getPatient(keyId)) == null) { pd = this.ontologyModel.addPatient(keyId, prop, this.configurationReader.getDictionarySection(), this.configurationReader.getDataSection(), references); } VisitDimension vd = this.ontologyModel.addVisit( pd.getPatientNum(), pd.getEncryptedPatientId(), pd.getEncryptedPatientIdSourceSystem(), (TemporalProposition) prop, this.configurationReader.getDictionarySection(), this.configurationReader.getDataSection(), references); for (FactHandler factHandler : this.factHandlers) { factHandler.handleRecord(pd, vd, provider, prop, forwardDerivations, backwardDerivations, references, this.knowledgeSource, derivedPropositions, this.dataSchemaConnection); } } } catch (InvalidPatientRecordException ex) { throw new QueryResultsHandlerProcessingException( "Load into i2b2 failed for query " + this.query.getId(), ex); } catch (InvalidFactException ioe) { throw new QueryResultsHandlerProcessingException( "Load into i2b2 failed for query " + this.query.getId(), ioe); } } @Override public void finish() throws QueryResultsHandlerProcessingException { Logger logger = I2b2ETLUtil.logger(); String queryId = this.query.getId(); logger.log(Level.INFO, "Done populating observation facts table for query 
{0}", queryId); try { for (FactHandler factHandler : this.factHandlers) { factHandler.clearOut(this.dataSchemaConnection); } this.ontologyModel.buildProviderHierarchy(); // persist Patients & Visits. logger.log(Level.INFO, "Populating dimensions for query {0}", queryId); logger.log(Level.FINE, "Populating patient dimension for query {0}", queryId); PatientDimension.insertAll(this.ontologyModel.getPatients(), this.dataSchemaConnection); logger.log(Level.FINE, "Populating visit dimension for query {0}", queryId); VisitDimension.insertAll(this.ontologyModel.getVisits(), this.dataSchemaConnection); logger.log(Level.FINE, "Inserting ages into observation fact table for query {0}", queryId); PatientDimension.insertAges(this.ontologyModel.getPatients(), this.dataSchemaConnection, this.configurationReader.getDictionarySection().get("ageConceptCodePrefix")); // find Provider root. gather its leaf nodes. persist Providers. logger.log(Level.FINE, "Populating provider dimension for query {0}", queryId); ProviderDimension.insertAll(this.ontologyModel.getProviders(), this.dataSchemaConnection); logger.log(Level.FINE, "Inserting providers into observation fact table for query {0}", queryId); ProviderDimension.insertFacts(this.dataSchemaConnection); // flush hot concepts out of the tree. persist Concepts. 
logger.log(Level.FINE, "Populating concept dimension for query {0}", this.query.getId()); ConceptDimension.insertAll(this.ontologyModel.getRoot(), this.dataSchemaConnection); this.dataSchemaConnection.close(); this.dataSchemaConnection = null; logger.log(Level.INFO, "Done populating dimensions for query {0}", queryId); logger.log(Level.INFO, "Done populating observation fact table for query {0}", queryId); persistMetadata(); } catch (OntologyBuildException ex) { throw new QueryResultsHandlerProcessingException( "Load into i2b2 failed for query " + queryId, ex); } catch (InvalidConceptCodeException ex) { throw new QueryResultsHandlerProcessingException( "Load into i2b2 failed for query " + queryId, ex); } catch (SQLException ex) { throw new QueryResultsHandlerProcessingException( "Load into i2b2 failed for query " + queryId, ex); } finally { if (this.dataSchemaConnection != null) { try { this.dataSchemaConnection.close(); } catch (SQLException ex) { } } } } @Override public void validate() { } private void readConfiguration() throws ConfigurationReadException { Logger logger = I2b2ETLUtil.logger(); logger.log(Level.FINER, "STEP: read conf.xml"); this.configurationReader = new ConfigurationReader(this.confFile); this.configurationReader.read(); } private void mostlyBuildOntology() throws OntologyBuildException { DictionarySection dictionarySection = this.configurationReader.getDictionarySection(); String rootNodeName = dictionarySection.get("rootNodeName"); this.ontologyModel = new Metadata(this.knowledgeSource, this.query.getPropositionDefinitions(), rootNodeName, this.configurationReader.getConceptsSection().getFolderSpecs(), this.configurationReader.getDictionarySection(), this.configurationReader.getDataSection()); } private void truncateDataTables() throws SQLException { // Truncate the data tables // This is controlled by 'truncateTables' in conf.xml String truncateTables = this.configurationReader.getDictionarySection().get("truncateTables"); if 
(truncateTables == null || truncateTables.equalsIgnoreCase("true")) { // To do: table names should be parameterized in conf.xml and related to other data String queryId = this.query.getId(); Logger logger = I2b2ETLUtil.logger(); logger.log(Level.INFO, "Truncating data tables for query {0}", queryId); String[] dataschemaTables = {"OBSERVATION_FACT", "CONCEPT_DIMENSION", "PATIENT_DIMENSION", "PATIENT_MAPPING", "PROVIDER_DIMENSION", "VISIT_DIMENSION", "ENCOUNTER_MAPPING"}; Connection conn = openDatabaseConnection("dataschema"); try { for (String tableName : dataschemaTables) { truncateTable(conn, tableName); } conn.close(); conn = null; logger.log(Level.INFO, "Done truncating data tables for query {0}", queryId); } finally { if (conn != null) { try { conn.close(); } catch (SQLException sqle) { } } } logger.log(Level.INFO, "Truncating metadata tables for query {0}", queryId); conn = openDatabaseConnection("metaschema"); try { truncateTable(conn, this.configurationReader.getDictionarySection().get("metaTableName")); // metaTableName in conf.xml conn.close(); conn = null; logger.log(Level.INFO, "Done truncating metadata tables for query {0}", queryId); } finally { if (conn != null) { try { conn.close(); } catch (SQLException sqle) { } } } } } private void truncateTable(Connection conn, String tableName) throws SQLException { Logger logger = I2b2ETLUtil.logger(); String queryId = this.query.getId(); try { String sql = "TRUNCATE TABLE " + tableName; if (logger.isLoggable(Level.FINE)) { logger.log(Level.FINE, "Executing the following SQL for query {0}: {1}", new Object[]{queryId, sql}); } Statement st = conn.createStatement(); try { st.execute(sql); st.close(); st = null; } finally { if (st != null) { try { st.close(); } catch (SQLException sqle) { } } } logger.log(Level.FINE, "Done executing SQL for query {0}", queryId); } catch (SQLException ex) { logger.log(Level.SEVERE, "An error occurred truncating the tables for query " + queryId, ex); throw ex; } } public Connection 
openDatabaseConnection(String schema) throws SQLException { DatabaseSection.DatabaseSpec db = this.configurationReader.getDatabaseSection().get(schema); Logger logger = I2b2ETLUtil.logger(); if (logger.isLoggable(Level.FINE)) { logger.log(Level.FINE, "Connecting to {0} as user {1} for query {2}", new Object[]{db.connect, db.user, this.query.getId()}); } return DriverManager.getConnection(db.connect, db.user, db.passwd); } private void persistMetadata() throws SQLException { String queryId = this.query.getId(); Logger logger = I2b2ETLUtil.logger(); logger.log(Level.INFO, "Populating metadata tables for query {0}", queryId); Connection cn = openDatabaseConnection("metaschema"); try { persistOntologyIntoI2B2Batch(this.ontologyModel, cn); cn.close(); cn = null; logger.log(Level.INFO, "Done populating metadata tables for query {0}", queryId); } finally { if (cn != null) { try { cn.close(); } catch (SQLException sqle) { } } } } private void persistOntologyIntoI2B2Batch(Metadata model, Connection cn) throws SQLException { // CREATE TABLE "I2B2" // ( // 1 "C_HLEVEL" NUMBER(22,0) NOT NULL ENABLE, // 2 "C_FULLNAME" VARCHAR2(700) NOT NULL ENABLE, // 3 "C_NAME" VARCHAR2(2000) NOT NULL ENABLE, // 4 "C_SYNONYM_CD" CHAR(1) NOT NULL ENABLE, // 5 "C_VISUALATTRIBUTES" CHAR(3) NOT NULL ENABLE, // 6 "C_TOTALNUM" NUMBER(22,0), // 7 "C_BASECODE" VARCHAR2(50), // 8 "C_METADATAXML" CLOB, // 9 "C_FACTTABLECOLUMN" VARCHAR2(50) NOT NULL ENABLE, // 10 "C_TABLENAME" VARCHAR2(50) NOT NULL ENABLE, // 11 "C_COLUMNNAME" VARCHAR2(50) NOT NULL ENABLE, // 12 "C_COLUMNDATATYPE" VARCHAR2(50) NOT NULL ENABLE, // 13 "C_OPERATOR" VARCHAR2(10) NOT NULL ENABLE, // 14 "C_DIMCODE" VARCHAR2(700) NOT NULL ENABLE, // 15 "C_COMMENT" CLOB, // 16 "C_TOOLTIP" VARCHAR2(900), // 17 "UPDATE_DATE" DATE NOT NULL ENABLE, // 18 "DOWNLOAD_DATE" DATE, // 19 "IMPORT_DATE" DATE, // 20 "SOURCESYSTEM_CD" VARCHAR2(50), // 21 "VALUETYPE_CD" VARCHAR2(50) // ) int idx = 0; int plus = 0; int minus = 0; String tableName = 
this.configurationReader.getDictionarySection().get("metaTableName"); int batchNumber = 0; Logger logger = I2b2ETLUtil.logger(); try { logger.log(Level.FINE, "batch inserting on table {0}", tableName); PreparedStatement ps; ps = cn.prepareStatement("insert into " + tableName + " values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"); try { @SuppressWarnings("unchecked") Enumeration<Concept> emu = model.getRoot().depthFirstEnumeration(); /* * A depth-first enumeration should traverse the hierarchies * in the order in which they were created. */ Timestamp importTimestamp = new Timestamp(System.currentTimeMillis()); Set<String> conceptCodes = new HashSet<String>(); while (emu.hasMoreElements()) { Concept concept = emu.nextElement(); ps.setLong(1, concept.getLevel()); ps.setString(2, concept.getI2B2Path()); assert concept.getDisplayName() != null && concept.getDisplayName().length() > 0 : "concept " + concept.getConceptCode() + " (" + concept.getI2B2Path() + ") " + " has an invalid display name '" + concept.getDisplayName() + "'"; ps.setString(3, concept.getDisplayName()); String conceptCode = concept.getConceptCode(); if (conceptCodes.add(conceptCode)) { ps.setString(4, SynonymCode.NOT_SYNONYM.getCode()); } else { ps.setString(4, SynonymCode.SYNONYM.getCode()); } ps.setString(5, concept.getCVisualAttributes()); ps.setObject(6, null); ps.setString(7, conceptCode); // put labParmXml here // ps.setObject(8, null); ps.setString(9, "concept_cd"); ps.setString(10, "concept_dimension"); ps.setString(11, "concept_path"); ps.setString(12, concept.getDataType().getCode()); ps.setString(13, concept.getOperator().getSQLOperator()); ps.setString(14, concept.getDimCode()); ps.setObject(15, null); ps.setString(16, null); ps.setTimestamp(17, importTimestamp); ps.setDate(18, null); ps.setTimestamp(19, importTimestamp); ps.setString(20, MetadataUtil.toSourceSystemCode( concept.getSourceSystemCode())); ps.setString(21, concept.getValueTypeCode().getCode()); ps.addBatch(); if ((++idx % 
8192) == 0) { importTimestamp = new Timestamp(System.currentTimeMillis()); batchNumber++; ps.executeBatch(); cn.commit(); idx = 0; plus += 8192; logBatch(tableName, batchNumber); ps.clearBatch(); if (logger.isLoggable(Level.FINE)) { logger.log(Level.FINE, "loaded ontology {0}:{1}", new Object[]{plus, minus}); } } } batchNumber++; ps.executeBatch(); ps.close(); ps = null; } finally { if (ps != null) { try { ps.close(); } catch (SQLException sqle) { } } } cn.commit(); cn.close(); cn = null; logBatch(tableName, batchNumber); logger.log(Level.FINE, "TALLY_META_{0}_PM: {1}:{2}", new Object[]{tableName, plus, minus}); } catch (SQLException e) { logger.log(Level.SEVERE, "Batch failed on OntologyTable " + tableName + ". I2B2 will not be correct.", e); throw e; } finally { if (cn != null) { try { cn.close(); } catch (SQLException sqle) { } } } } private static void logBatch(String tableName, int batchNumber) { Logger logger = I2b2ETLUtil.logger(); if (logger.isLoggable(Level.FINEST)) { Object[] args = new Object[]{tableName, batchNumber}; logger.log(Level.FINEST, "DB_{0}_BATCH={1}", args); } } @Override public String[] getPropositionIdsNeeded() throws KnowledgeSourceReadException { if (!this.inferPropositionIdsNeeded) { return ArrayUtils.EMPTY_STRING_ARRAY; } else { Set<String> result = new HashSet<String>(); DictionarySection dictionarySection = this.configurationReader.getDictionarySection(); String visitPropId = dictionarySection.get("visitDimension"); result.add(visitPropId); PropositionDefinition visitProp = this.knowledgeSource.readPropositionDefinition(visitPropId); DataSection dataSection = this.configurationReader.getDataSection(); for (DataSpec dataSpec : dataSection.getAll()) { if (dataSpec.referenceName != null) { ReferenceDefinition refDef = visitProp.referenceDefinition(dataSpec.referenceName); if (refDef == null) { throw new KnowledgeSourceReadException( "missing reference " + dataSpec.referenceName + " for proposition definition " + visitPropId + " for query 
" + this.query.getId()); } org.arp.javautil.arrays.Arrays.addAll(result, refDef.getPropositionIds()); } } ConceptsSection conceptsSection = this.configurationReader.getConceptsSection(); for (FolderSpec folderSpec : conceptsSection.getFolderSpecs()) { for (String proposition : folderSpec.propositions) { result.add(proposition); } } for (PropositionDefinition pd : this.query.getPropositionDefinitions()) { result.add(pd.getId()); } return result.toArray(new String[result.size()]); } } }
src/main/java/edu/emory/cci/aiw/i2b2etl/I2B2QueryResultsHandler.java
/* * #%L * AIW i2b2 ETL * %% * Copyright (C) 2012 Emory University * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package edu.emory.cci.aiw.i2b2etl; import edu.emory.cci.aiw.i2b2etl.table.InvalidFactException; import edu.emory.cci.aiw.i2b2etl.table.FactHandler; import edu.emory.cci.aiw.i2b2etl.configuration.*; import edu.emory.cci.aiw.i2b2etl.configuration.ConceptsSection.FolderSpec; import edu.emory.cci.aiw.i2b2etl.metadata.Metadata; import edu.emory.cci.aiw.i2b2etl.metadata.InvalidConceptCodeException; import edu.emory.cci.aiw.i2b2etl.metadata.Concept; import edu.emory.cci.aiw.i2b2etl.metadata.OntologyBuildException; import edu.emory.cci.aiw.i2b2etl.configuration.DataSection.DataSpec; import edu.emory.cci.aiw.i2b2etl.metadata.*; import edu.emory.cci.aiw.i2b2etl.table.ConceptDimension; import edu.emory.cci.aiw.i2b2etl.table.PatientDimension; import edu.emory.cci.aiw.i2b2etl.table.ProviderDimension; import edu.emory.cci.aiw.i2b2etl.table.VisitDimension; import java.io.File; import java.sql.*; import java.util.*; import java.util.logging.Level; import java.util.logging.Logger; import org.apache.commons.lang.ArrayUtils; import org.protempa.KnowledgeSource; import org.protempa.KnowledgeSourceReadException; import org.protempa.PropositionDefinition; import org.protempa.ReferenceDefinition; import org.protempa.proposition.Proposition; import org.protempa.proposition.TemporalProposition; import org.protempa.proposition.UniqueId; import org.protempa.query.Query; import 
org.protempa.query.handler.QueryResultsHandler; import org.protempa.query.handler.QueryResultsHandlerInitException; import org.protempa.query.handler.QueryResultsHandlerProcessingException; import org.protempa.query.handler.table.Link; import org.protempa.query.handler.table.Reference; /** * * @author Andrew Post */ public final class I2B2QueryResultsHandler implements QueryResultsHandler { private static final long serialVersionUID = -1503401944818776787L; private final File confFile; private final boolean inferPropositionIdsNeeded; private KnowledgeSource knowledgeSource; private ConfigurationReader configurationReader; private Metadata ontologyModel; private Connection dataSchemaConnection; private List<FactHandler> factHandlers; private Query query; public I2B2QueryResultsHandler(File confXML) { this(confXML, true); } public I2B2QueryResultsHandler(File confXML, boolean inferPropositionIdsNeeded) { if (confXML == null) { throw new IllegalArgumentException("confXML cannot be null"); } Logger logger = I2b2ETLUtil.logger(); this.confFile = confXML; logger.log(Level.FINE, String.format("Using configuration file: %s", this.confFile.getAbsolutePath())); this.inferPropositionIdsNeeded = inferPropositionIdsNeeded; } /** * Reads the configuration file passed into the constructor, in addition to * setting the knowledge source and query. This method is intended to be * called internally by Protempa. * * @param knowledgeSource the {@link KnowledgeSource}. Cannot be * <code>null</code>. * @param query the {@link Query}. Cannot be <code>null</code>. * @throws QueryResultsHandlerInitException if an error occurs reading the * configuration file. 
*/ @Override public void init(KnowledgeSource knowledgeSource, Query query) throws QueryResultsHandlerInitException { assert knowledgeSource != null : "knowledgeSource cannot be null"; assert query != null : "query cannot be null"; this.knowledgeSource = knowledgeSource; this.query = query; try { readConfiguration(); } catch (ConfigurationReadException ex) { throw new QueryResultsHandlerInitException( "Could not initialize query results handler for query " + query.getId(), ex); } } /** * Builds most of the concept tree, truncates the data tables, opens a * connection to the i2b2 project database, and does some other prep. This * method is called before the first call to{@link #handleQueryResult()}. * * @throws QueryResultsHandlerProcessingException */ @Override public void start() throws QueryResultsHandlerProcessingException { Logger logger = I2b2ETLUtil.logger(); try { mostlyBuildOntology(); truncateDataTables(); this.dataSchemaConnection = openDatabaseConnection("dataschema"); assembleFactHandlers(); logger.log(Level.INFO, "Populating observation facts table for query {0}", this.query.getId()); } catch (KnowledgeSourceReadException ex) { throw new QueryResultsHandlerProcessingException( "Error during query " + this.query.getId(), ex); } catch (InstantiationException ex) { throw new QueryResultsHandlerProcessingException( "Error during query " + this.query.getId(), ex); } catch (IllegalAccessException ex) { throw new QueryResultsHandlerProcessingException( "Error during query " + this.query.getId(), ex); } catch (OntologyBuildException ex) { throw new QueryResultsHandlerProcessingException( "Error during query " + this.query.getId(), ex); } catch (SQLException ex) { throw new QueryResultsHandlerProcessingException( "Error during query " + this.query.getId(), ex); } } private void assembleFactHandlers() throws IllegalAccessException, InstantiationException, KnowledgeSourceReadException { this.factHandlers = new ArrayList<FactHandler>(); DictionarySection 
dictSection = this.configurationReader.getDictionarySection(); String visitPropId = dictSection.get("visitDimension"); PropositionDefinition visitPropDef = this.knowledgeSource.readPropositionDefinition(visitPropId); DataSection dataSection = this.configurationReader.getDataSection(); for (DataSection.DataSpec obx : dataSection.getAll()) { PropositionDefinition[] propDefs; Link[] links; if (obx.referenceName != null) { links = new Link[]{new Reference(obx.referenceName)}; ReferenceDefinition refDef = visitPropDef.referenceDefinition(obx.referenceName); String[] propIds = refDef.getPropositionIds(); propDefs = new PropositionDefinition[propIds.length + 1]; propDefs[0] = visitPropDef; for (int i = 1; i < propDefs.length; i++) { propDefs[i] = this.knowledgeSource.readPropositionDefinition( propIds[i - 1]); assert propDefs[i] != null : "Invalid proposition id " + propIds[i - 1]; } } else { links = null; propDefs = new PropositionDefinition[]{visitPropDef}; } String[] potentialDerivedPropIdsArr = this.ontologyModel.extractDerived(propDefs); FactHandler factHandler = new FactHandler(links, obx.propertyName, obx.start, obx.finish, obx.units, potentialDerivedPropIdsArr, this.ontologyModel); this.factHandlers.add(factHandler); } } @Override public void handleQueryResult(String keyId, List<Proposition> propositions, Map<Proposition, List<Proposition>> forwardDerivations, Map<Proposition, List<Proposition>> backwardDerivations, Map<UniqueId, Proposition> references) throws QueryResultsHandlerProcessingException { DictionarySection dictSection = this.configurationReader.getDictionarySection(); String visitPropId = dictSection.get("visitDimension"); try { Set<Proposition> derivedPropositions = new HashSet<Proposition>(); List<Proposition> props = new ArrayList<Proposition>(); for (Proposition prop : propositions) { if (prop.getId().equals(visitPropId)) { props.add(prop); } } for (Proposition prop : props) { DataSection obxSection = this.configurationReader.getDataSection(); 
DataSpec providerFullNameSpec = obxSection.get(dictSection.get("providerFullName")); DataSpec providerFirstNameSpec = obxSection.get(dictSection.get("providerFirstName")); DataSpec providerMiddleNameSpec = obxSection.get(dictSection.get("providerMiddleName")); DataSpec providerLastNameSpec = obxSection.get(dictSection.get("providerLastName")); ProviderDimension provider = this.ontologyModel.addProviderIfNeeded(prop, providerFullNameSpec.referenceName, providerFullNameSpec.propertyName, providerFirstNameSpec.referenceName, providerFirstNameSpec.propertyName, providerMiddleNameSpec.referenceName, providerMiddleNameSpec.propertyName, providerLastNameSpec.referenceName, providerLastNameSpec.propertyName, references); PatientDimension pd; if ((pd = this.ontologyModel.getPatient(keyId)) == null) { pd = this.ontologyModel.addPatient(keyId, prop, this.configurationReader.getDictionarySection(), this.configurationReader.getDataSection(), references); } VisitDimension vd = this.ontologyModel.addVisit( pd.getPatientNum(), pd.getEncryptedPatientId(), pd.getEncryptedPatientIdSourceSystem(), (TemporalProposition) prop, this.configurationReader.getDictionarySection(), this.configurationReader.getDataSection(), references); for (FactHandler factHandler : this.factHandlers) { factHandler.handleRecord(pd, vd, provider, prop, forwardDerivations, backwardDerivations, references, this.knowledgeSource, derivedPropositions, this.dataSchemaConnection); } } } catch (InvalidPatientRecordException ex) { throw new QueryResultsHandlerProcessingException( "Load into i2b2 failed for query " + this.query.getId(), ex); } catch (InvalidFactException ioe) { throw new QueryResultsHandlerProcessingException( "Load into i2b2 failed for query " + this.query.getId(), ioe); } } @Override public void finish() throws QueryResultsHandlerProcessingException { Logger logger = I2b2ETLUtil.logger(); String queryId = this.query.getId(); logger.log(Level.INFO, "Done populating observation facts table for query 
{0}", queryId); try { for (FactHandler factHandler : this.factHandlers) { factHandler.clearOut(this.dataSchemaConnection); } this.ontologyModel.buildProviderHierarchy(); // persist Patients & Visits. logger.log(Level.INFO, "Populating dimensions for query {0}", queryId); logger.log(Level.FINE, "Populating patient dimension for query {0}", queryId); PatientDimension.insertAll(this.ontologyModel.getPatients(), this.dataSchemaConnection); logger.log(Level.FINE, "Populating visit dimension for query {0}", queryId); VisitDimension.insertAll(this.ontologyModel.getVisits(), this.dataSchemaConnection); logger.log(Level.FINE, "Inserting ages into observation fact table for query {0}", queryId); PatientDimension.insertAges(this.ontologyModel.getPatients(), this.dataSchemaConnection, this.configurationReader.getDictionarySection().get("ageConceptCodePrefix")); // find Provider root. gather its leaf nodes. persist Providers. logger.log(Level.FINE, "Populating provider dimension for query {0}", queryId); ProviderDimension.insertAll(this.ontologyModel.getProviders(), this.dataSchemaConnection); logger.log(Level.FINE, "Inserting providers into observation fact table for query {0}", queryId); ProviderDimension.insertFacts(this.dataSchemaConnection); // flush hot concepts out of the tree. persist Concepts. 
logger.log(Level.FINE, "Populating concept dimension for query {0}", this.query.getId()); ConceptDimension.insertAll(this.ontologyModel.getRoot(), this.dataSchemaConnection); this.dataSchemaConnection.close(); this.dataSchemaConnection = null; logger.log(Level.INFO, "Done populating dimensions for query {0}", queryId); logger.log(Level.INFO, "Done populating observation fact table for query {0}", queryId); persistMetadata(); } catch (OntologyBuildException ex) { throw new QueryResultsHandlerProcessingException( "Load into i2b2 failed for query " + queryId, ex); } catch (InvalidConceptCodeException ex) { throw new QueryResultsHandlerProcessingException( "Load into i2b2 failed for query " + queryId, ex); } catch (SQLException ex) { throw new QueryResultsHandlerProcessingException( "Load into i2b2 failed for query " + queryId, ex); } finally { if (this.dataSchemaConnection != null) { try { this.dataSchemaConnection.close(); } catch (SQLException ex) { } } } } @Override public void validate() { } private void readConfiguration() throws ConfigurationReadException { Logger logger = I2b2ETLUtil.logger(); logger.log(Level.FINER, "STEP: read conf.xml"); this.configurationReader = new ConfigurationReader(this.confFile); this.configurationReader.read(); } private void mostlyBuildOntology() throws OntologyBuildException { DictionarySection dictionarySection = this.configurationReader.getDictionarySection(); String rootNodeName = dictionarySection.get("rootNodeName"); this.ontologyModel = new Metadata(this.knowledgeSource, this.query.getPropositionDefinitions(), rootNodeName, this.configurationReader.getConceptsSection().getFolderSpecs(), this.configurationReader.getDictionarySection(), this.configurationReader.getDataSection()); } private void truncateDataTables() throws SQLException { // Truncate the data tables // This is controlled by 'truncateTables' in conf.xml String truncateTables = this.configurationReader.getDictionarySection().get("truncateTables"); if 
(truncateTables == null || truncateTables.equalsIgnoreCase("true")) { // To do: table names should be parameterized in conf.xml and related to other data String queryId = this.query.getId(); Logger logger = I2b2ETLUtil.logger(); logger.log(Level.INFO, "Truncating data tables for query {0}", queryId); String[] dataschemaTables = {"OBSERVATION_FACT", "CONCEPT_DIMENSION", "PATIENT_DIMENSION", "PATIENT_MAPPING", "PROVIDER_DIMENSION", "VISIT_DIMENSION", "ENCOUNTER_MAPPING"}; Connection conn = openDatabaseConnection("dataschema"); try { for (String tableName : dataschemaTables) { truncateTable(conn, tableName); } conn.close(); conn = null; logger.log(Level.INFO, "Done truncating data tables for query {0}", queryId); } finally { if (conn != null) { try { conn.close(); } catch (SQLException sqle) { } } } logger.log(Level.INFO, "Truncating metadata tables for query {0}", queryId); conn = openDatabaseConnection("metaschema"); try { truncateTable(conn, this.configurationReader.getDictionarySection().get("metaTableName")); // metaTableName in conf.xml conn.close(); conn = null; logger.log(Level.INFO, "Done truncating metadata tables for query {0}", queryId); } finally { if (conn != null) { try { conn.close(); } catch (SQLException sqle) { } } } } } private void truncateTable(Connection conn, String tableName) throws SQLException { Logger logger = I2b2ETLUtil.logger(); String queryId = this.query.getId(); try { String sql = "TRUNCATE TABLE " + tableName; if (logger.isLoggable(Level.FINE)) { logger.log(Level.FINE, "Executing the following SQL for query {0}: {1}", new Object[]{queryId, sql}); } Statement st = conn.createStatement(); try { st.execute(sql); st.close(); st = null; } finally { if (st != null) { try { st.close(); } catch (SQLException sqle) { } } } logger.log(Level.FINE, "Done executing SQL for query {0}", queryId); } catch (SQLException ex) { logger.log(Level.SEVERE, "An error occurred truncating the tables for query " + queryId, ex); throw ex; } } public Connection 
openDatabaseConnection(String schema) throws SQLException { DatabaseSection.DatabaseSpec db = this.configurationReader.getDatabaseSection().get(schema); Logger logger = I2b2ETLUtil.logger(); if (logger.isLoggable(Level.FINE)) { logger.log(Level.FINE, "Connecting to {0} as user {1} for query {2}", new Object[]{db.connect, db.user, this.query.getId()}); } return DriverManager.getConnection(db.connect, db.user, db.passwd); } private void persistMetadata() throws SQLException { String queryId = this.query.getId(); Logger logger = I2b2ETLUtil.logger(); logger.log(Level.INFO, "Populating metadata tables for query {0}", queryId); Connection cn = openDatabaseConnection("metaschema"); try { persistOntologyIntoI2B2Batch(this.ontologyModel, cn); cn.close(); cn = null; logger.log(Level.INFO, "Done populating metadata tables for query {0}", queryId); } finally { if (cn != null) { try { cn.close(); } catch (SQLException sqle) { } } } } private void persistOntologyIntoI2B2Batch(Metadata model, Connection cn) throws SQLException { // CREATE TABLE "I2B2" // ( // 1 "C_HLEVEL" NUMBER(22,0) NOT NULL ENABLE, // 2 "C_FULLNAME" VARCHAR2(700) NOT NULL ENABLE, // 3 "C_NAME" VARCHAR2(2000) NOT NULL ENABLE, // 4 "C_SYNONYM_CD" CHAR(1) NOT NULL ENABLE, // 5 "C_VISUALATTRIBUTES" CHAR(3) NOT NULL ENABLE, // 6 "C_TOTALNUM" NUMBER(22,0), // 7 "C_BASECODE" VARCHAR2(50), // 8 "C_METADATAXML" CLOB, // 9 "C_FACTTABLECOLUMN" VARCHAR2(50) NOT NULL ENABLE, // 10 "C_TABLENAME" VARCHAR2(50) NOT NULL ENABLE, // 11 "C_COLUMNNAME" VARCHAR2(50) NOT NULL ENABLE, // 12 "C_COLUMNDATATYPE" VARCHAR2(50) NOT NULL ENABLE, // 13 "C_OPERATOR" VARCHAR2(10) NOT NULL ENABLE, // 14 "C_DIMCODE" VARCHAR2(700) NOT NULL ENABLE, // 15 "C_COMMENT" CLOB, // 16 "C_TOOLTIP" VARCHAR2(900), // 17 "UPDATE_DATE" DATE NOT NULL ENABLE, // 18 "DOWNLOAD_DATE" DATE, // 19 "IMPORT_DATE" DATE, // 20 "SOURCESYSTEM_CD" VARCHAR2(50), // 21 "VALUETYPE_CD" VARCHAR2(50) // ) int idx = 0; int plus = 0; int minus = 0; String tableName = 
this.configurationReader.getDictionarySection().get("metaTableName"); int batchNumber = 0; Logger logger = I2b2ETLUtil.logger(); try { logger.log(Level.FINE, "batch inserting on table {0}", tableName); PreparedStatement ps; ps = cn.prepareStatement("insert into " + tableName + " values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)"); try { @SuppressWarnings("unchecked") Enumeration<Concept> emu = model.getRoot().depthFirstEnumeration(); /* * A depth-first enumeration should traverse the hierarchies * in the order in which they were created. */ Timestamp importTimestamp = new Timestamp(System.currentTimeMillis()); Set<String> conceptCodes = new HashSet<String>(); while (emu.hasMoreElements()) { Concept concept = emu.nextElement(); ps.setLong(1, concept.getLevel()); ps.setString(2, concept.getI2B2Path()); assert concept.getDisplayName() != null && concept.getDisplayName().length() > 0 : "concept " + concept.getConceptCode() + " (" + concept.getI2B2Path() + ") " + " has an invalid display name '" + concept.getDisplayName() + "'"; ps.setString(3, concept.getDisplayName()); String conceptCode = concept.getConceptCode(); if (conceptCodes.add(conceptCode)) { ps.setString(4, SynonymCode.NOT_SYNONYM.getCode()); } else { ps.setString(4, SynonymCode.SYNONYM.getCode()); } ps.setString(5, concept.getCVisualAttributes()); ps.setObject(6, null); ps.setString(7, conceptCode); // put labParmXml here // ps.setObject(8, null); ps.setString(9, "concept_cd"); ps.setString(10, "concept_dimension"); ps.setString(11, "concept_path"); ps.setString(12, concept.getDataType().getCode()); ps.setString(13, concept.getOperator().getSQLOperator()); ps.setString(14, concept.getDimCode()); ps.setObject(15, null); ps.setString(16, null); ps.setTimestamp(17, importTimestamp); ps.setDate(18, null); ps.setTimestamp(19, importTimestamp); ps.setString(20, MetadataUtil.toSourceSystemCode( concept.getSourceSystemCode())); ps.setString(21, concept.getValueTypeCode().getCode()); ps.addBatch(); if ((++idx % 
8192) == 0) { importTimestamp = new Timestamp(System.currentTimeMillis()); batchNumber++; ps.executeBatch(); cn.commit(); idx = 0; plus += 8192; logBatch(tableName, batchNumber); ps.clearBatch(); if (logger.isLoggable(Level.FINE)) { logger.log(Level.FINE, "loaded ontology {0}:{1}", new Object[]{plus, minus}); } } } batchNumber++; ps.executeBatch(); ps.close(); ps = null; } finally { if (ps != null) { try { ps.close(); } catch (SQLException sqle) { } } } cn.commit(); cn.close(); cn = null; logBatch(tableName, batchNumber); logger.log(Level.FINE, "TALLY_META_{0}_PM: {1}:{2}", new Object[]{tableName, plus, minus}); } catch (SQLException e) { logger.log(Level.SEVERE, "Batch failed on OntologyTable " + tableName + ". I2B2 will not be correct.", e); throw e; } finally { if (cn != null) { try { cn.close(); } catch (SQLException sqle) { } } } } private static void logBatch(String tableName, int batchNumber) { Logger logger = I2b2ETLUtil.logger(); if (logger.isLoggable(Level.FINEST)) { Object[] args = new Object[]{tableName, batchNumber}; logger.log(Level.FINEST, "DB_{0}_BATCH={1}", args); } } public String[] getPropositionIdsNeeded() throws KnowledgeSourceReadException { if (!this.inferPropositionIdsNeeded) { return ArrayUtils.EMPTY_STRING_ARRAY; } else { Set<String> result = new HashSet<String>(); DictionarySection dictionarySection = this.configurationReader.getDictionarySection(); String visitPropId = dictionarySection.get("visitDimension"); result.add(visitPropId); PropositionDefinition visitProp = this.knowledgeSource.readPropositionDefinition(visitPropId); DataSection dataSection = this.configurationReader.getDataSection(); for (DataSpec dataSpec : dataSection.getAll()) { if (dataSpec.referenceName != null) { ReferenceDefinition refDef = visitProp.referenceDefinition(dataSpec.referenceName); if (refDef == null) { throw new KnowledgeSourceReadException( "missing reference " + dataSpec.referenceName + " for proposition definition " + visitPropId + " for query " + 
this.query.getId()); } org.arp.javautil.arrays.Arrays.addAll(result, refDef.getPropositionIds()); } } ConceptsSection conceptsSection = this.configurationReader.getConceptsSection(); for (FolderSpec folderSpec : conceptsSection.getFolderSpecs()) { for (String proposition : folderSpec.propositions) { result.add(proposition); } } for (PropositionDefinition pd : this.query.getPropositionDefinitions()) { result.add(pd.getId()); } return result.toArray(new String[result.size()]); } } }
Added @Override.
src/main/java/edu/emory/cci/aiw/i2b2etl/I2B2QueryResultsHandler.java
Added @Override.
<ide><path>rc/main/java/edu/emory/cci/aiw/i2b2etl/I2B2QueryResultsHandler.java <ide> } <ide> } <ide> <add> @Override <ide> public String[] getPropositionIdsNeeded() <ide> throws KnowledgeSourceReadException { <ide> if (!this.inferPropositionIdsNeeded) {
Java
apache-2.0
172afd07b2d482209cb89a260fa8ca6d67f7dc2d
0
AndrewKhitrin/dbeaver,liuyuanyuan/dbeaver,liuyuanyuan/dbeaver,liuyuanyuan/dbeaver,AndrewKhitrin/dbeaver,AndrewKhitrin/dbeaver,AndrewKhitrin/dbeaver,liuyuanyuan/dbeaver,liuyuanyuan/dbeaver
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2017 Serge Rider ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ui.controls.resultset.valuefilter; import java.util.ArrayList; import org.eclipse.jface.action.Action; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.jface.dialogs.IDialogSettings; import org.eclipse.jface.viewers.CheckboxTableViewer; import org.eclipse.jface.viewers.ColumnLabelProvider; import org.eclipse.swt.SWT; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.data.DBDAttributeBinding; import org.jkiss.dbeaver.model.data.DBDDisplayFormat; import org.jkiss.dbeaver.model.data.DBDLabelValuePair; import org.jkiss.dbeaver.model.exec.DBCLogicalOperator; import org.jkiss.dbeaver.runtime.ui.DBUserInterface; import org.jkiss.dbeaver.ui.DBeaverIcons; import org.jkiss.dbeaver.ui.UIIcon; import org.jkiss.dbeaver.ui.UIUtils; import org.jkiss.dbeaver.ui.controls.ViewerColumnController; import org.jkiss.dbeaver.ui.controls.resultset.ResultSetRow; import org.jkiss.dbeaver.ui.controls.resultset.ResultSetUtils; import 
org.jkiss.dbeaver.ui.controls.resultset.ResultSetValueController; import org.jkiss.dbeaver.ui.controls.resultset.ResultSetViewer; import org.jkiss.dbeaver.ui.data.IValueController; import org.jkiss.dbeaver.ui.dialogs.BaseDialog; public class FilterValueEditDialog extends BaseDialog{ private static final String DIALOG_ID = "DBeaver.FilterValueEditDialog";//$NON-NLS-1$ private GenericFilterValueEdit handler; private Object value; private static final Log log = Log.getLog(FilterValueEditDialog.class); public FilterValueEditDialog(ResultSetViewer viewer, DBDAttributeBinding attr, ResultSetRow[] rows, DBCLogicalOperator operator) { super(viewer.getControl().getShell(), "Edit value", null); handler = new GenericFilterValueEdit(viewer, attr, rows, operator); } @Override protected IDialogSettings getDialogBoundsSettings() { return UIUtils.getDialogSettings(DIALOG_ID + "." + handler.operator.name()); } @Override protected Composite createDialogArea(Composite parent) { Composite composite = super.createDialogArea(parent); Label label = UIUtils.createControlLabel(composite, handler.attr.getName() + " " + handler.operator.getStringValue()); label.setLayoutData(new GridData(GridData.FILL_HORIZONTAL)); int argumentCount = handler.operator.getArgumentCount(); if (argumentCount == 1) { createSingleValueEditor(composite); } else if (argumentCount < 0) { createMultiValueSelector(composite); } return parent; } private void createSingleValueEditor(Composite composite) { Composite editorPlaceholder = UIUtils.createPlaceholder(composite, 1); editorPlaceholder.setLayoutData(new GridData(GridData.FILL_BOTH)); editorPlaceholder.setLayout(new FillLayout()); ResultSetRow singleRow = handler.rows[0]; final ResultSetValueController valueController = new ResultSetValueController( handler.viewer, handler.attr, singleRow, IValueController.EditType.INLINE, editorPlaceholder) { @Override public boolean isReadOnly() { // Filter value is never read-only return false; } }; try { handler.editor = 
valueController.getValueManager().createEditor(valueController); if (handler.editor != null) { handler.editor.createControl(); handler.editor.primeEditorValue(valueController.getValue()); } } catch (DBException e) { log.error("Can't create inline value editor", e); } if (handler.editor == null) { handler.textControl = new Text(editorPlaceholder, SWT.BORDER | SWT.WRAP | SWT.V_SCROLL); handler.textControl.setText(""); GridData gd = new GridData(GridData.FILL_BOTH); gd.widthHint = 300; gd.heightHint = 300; gd.minimumHeight = 100; gd.minimumWidth = 100; handler.textControl.setLayoutData(gd); } } private void createMultiValueSelector(Composite composite) { GridData layoutData = new GridData(GridData.FILL_BOTH); layoutData.widthHint = 400; layoutData.heightHint = 300; handler.setupTable(composite, SWT.BORDER | SWT.MULTI | SWT.CHECK | SWT.FULL_SELECTION, true, true, layoutData); ViewerColumnController columnController = new ViewerColumnController(getClass().getName(), handler.table); columnController.addColumn("Value", "Value", SWT.LEFT, true, true, new ColumnLabelProvider() { @Override public String getText(Object element) { return handler.attr.getValueHandler().getValueDisplayString(handler.attr, ((DBDLabelValuePair)element).getValue(), DBDDisplayFormat.UI); } }); columnController.addColumn("Description", "Row description (composed from dictionary columns)", SWT.LEFT, true, true, new ColumnLabelProvider() { @Override public String getText(Object element) { return ((DBDLabelValuePair)element).getLabel(); } }); columnController.createColumns(); Action[] elements = new Action[] { new Action("Select &All") { @Override public void run() { for (TableItem item : handler.table.getTable().getItems()) { item.setChecked(true); } } }, new Action("Select &None") { @Override public void run() { for (TableItem item : handler.table.getTable().getItems()) { item.setChecked(false); } } } }; handler.addContextMenu(elements); handler.addFilterTextbox(composite); handler.filterPattern = 
null; handler.loadValues(); } public Object getValue() { return value; } @Override protected void createButtonsForButtonBar(Composite parent) { if (handler.operator.getArgumentCount() == 1) { Button copyButton = createButton(parent, IDialogConstants.DETAILS_ID, "Clipboard", false); copyButton.setImage(DBeaverIcons.getImage(UIIcon.FILTER_CLIPBOARD)); } createButton(parent, IDialogConstants.OK_ID, IDialogConstants.OK_LABEL, true); createButton(parent, IDialogConstants.CANCEL_ID, IDialogConstants.CANCEL_LABEL, false); } @Override protected void buttonPressed(int buttonId) { if (buttonId == IDialogConstants.DETAILS_ID) { try { Object value = ResultSetUtils.getAttributeValueFromClipboard(handler.attr); handler.editor.primeEditorValue(value); } catch (DBException e) { DBUserInterface.getInstance().showError("Copy from clipboard", "Can't copy value", e); } } else { super.buttonPressed(buttonId); } } @Override protected void okPressed() { if (handler.table != null) { java.util.List<Object> values = new ArrayList<>(); for (DBDLabelValuePair item : handler.getMultiValues()) { if ( ((TableItem)handler.table.testFindItem(item)).getChecked()) { values.add(item.getValue()); } } value = values.toArray(); } else if (handler.editor != null) { try { value = handler.editor.extractEditorValue(); } catch (DBException e) { log.error("Can't get editor value", e); } } else { value = handler.textControl.getText(); } super.okPressed(); } }
plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/controls/resultset/valuefilter/FilterValueEditDialog.java
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2017 Serge Rider ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ui.controls.resultset.valuefilter; import java.util.ArrayList; import org.eclipse.jface.action.Action; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.jface.dialogs.IDialogSettings; import org.eclipse.jface.viewers.CheckboxTableViewer; import org.eclipse.jface.viewers.ColumnLabelProvider; import org.eclipse.swt.SWT; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.data.DBDAttributeBinding; import org.jkiss.dbeaver.model.data.DBDDisplayFormat; import org.jkiss.dbeaver.model.data.DBDLabelValuePair; import org.jkiss.dbeaver.model.exec.DBCLogicalOperator; import org.jkiss.dbeaver.runtime.ui.DBUserInterface; import org.jkiss.dbeaver.ui.DBeaverIcons; import org.jkiss.dbeaver.ui.UIIcon; import org.jkiss.dbeaver.ui.UIUtils; import org.jkiss.dbeaver.ui.controls.ViewerColumnController; import org.jkiss.dbeaver.ui.controls.resultset.ResultSetRow; import org.jkiss.dbeaver.ui.controls.resultset.ResultSetUtils; import 
org.jkiss.dbeaver.ui.controls.resultset.ResultSetValueController; import org.jkiss.dbeaver.ui.controls.resultset.ResultSetViewer; import org.jkiss.dbeaver.ui.data.IValueController; import org.jkiss.dbeaver.ui.dialogs.BaseDialog; public class FilterValueEditDialog extends BaseDialog{ private static final String DIALOG_ID = "DBeaver.FilterValueEditDialog";//$NON-NLS-1$ private GenericFilterValueEdit handler; private Object value; private static final Log log = Log.getLog(FilterValueEditDialog.class); public FilterValueEditDialog(ResultSetViewer viewer, DBDAttributeBinding attr, ResultSetRow[] rows, DBCLogicalOperator operator) { super(viewer.getControl().getShell(), "Edit value", null); handler = new GenericFilterValueEdit(viewer, attr, rows, operator); } @Override protected IDialogSettings getDialogBoundsSettings() { return UIUtils.getDialogSettings(DIALOG_ID + "." + handler.operator.name()); } @Override protected Composite createDialogArea(Composite parent) { Composite composite = super.createDialogArea(parent); Label label = UIUtils.createControlLabel(composite, handler.attr.getName() + " " + handler.operator.getStringValue()); label.setLayoutData(new GridData(GridData.FILL_HORIZONTAL)); int argumentCount = handler.operator.getArgumentCount(); if (argumentCount == 1) { createSingleValueEditor(composite); } else if (argumentCount < 0) { createMultiValueSelector(composite); } return parent; } private void createSingleValueEditor(Composite composite) { Composite editorPlaceholder = UIUtils.createPlaceholder(composite, 1); editorPlaceholder.setLayoutData(new GridData(GridData.FILL_BOTH)); editorPlaceholder.setLayout(new FillLayout()); ResultSetRow singleRow = handler.rows[0]; final ResultSetValueController valueController = new ResultSetValueController( handler.viewer, handler.attr, singleRow, IValueController.EditType.INLINE, editorPlaceholder) { @Override public boolean isReadOnly() { // Filter value is never read-only return false; } }; try { handler.editor = 
valueController.getValueManager().createEditor(valueController); if (handler.editor != null) { handler.editor.createControl(); handler.editor.primeEditorValue(valueController.getValue()); } } catch (DBException e) { log.error("Can't create inline value editor", e); } if (handler.editor == null) { handler.textControl = new Text(editorPlaceholder, SWT.BORDER | SWT.WRAP | SWT.V_SCROLL); handler.textControl.setText(""); GridData gd = new GridData(GridData.FILL_BOTH); gd.widthHint = 300; gd.heightHint = 300; gd.minimumHeight = 100; gd.minimumWidth = 100; handler.textControl.setLayoutData(gd); } } private void createMultiValueSelector(Composite composite) { GridData layoutData = new GridData(GridData.FILL_BOTH); layoutData.widthHint = 400; layoutData.heightHint = 300; handler.setupTable(composite, SWT.BORDER | SWT.MULTI | SWT.CHECK | SWT.FULL_SELECTION, true, true, layoutData); ViewerColumnController columnController = new ViewerColumnController(getClass().getName(), handler.table); columnController.addColumn("Value", "Value", SWT.LEFT, true, true, new ColumnLabelProvider() { @Override public String getText(Object element) { return handler.attr.getValueHandler().getValueDisplayString(handler.attr, ((DBDLabelValuePair)element).getValue(), DBDDisplayFormat.UI); } }); columnController.addColumn("Description", "Row description (composed from dictionary columns)", SWT.LEFT, true, true, new ColumnLabelProvider() { @Override public String getText(Object element) { return ((DBDLabelValuePair)element).getLabel(); } }); columnController.createColumns(); Action[] elements = new Action[] { new Action("Select &All") { @Override public void run() { for (DBDLabelValuePair row : handler.getMultiValues()) { ((CheckboxTableViewer) handler.table).setChecked(row, true); } } }, new Action("Select &None") { @Override public void run() { for (DBDLabelValuePair row : handler.getMultiValues()) { ((CheckboxTableViewer) handler.table).setChecked(row, false); } } } }; 
handler.addContextMenu(elements); handler.addFilterTextbox(composite); handler.filterPattern = null; handler.loadValues(); } public Object getValue() { return value; } @Override protected void createButtonsForButtonBar(Composite parent) { if (handler.operator.getArgumentCount() == 1) { Button copyButton = createButton(parent, IDialogConstants.DETAILS_ID, "Clipboard", false); copyButton.setImage(DBeaverIcons.getImage(UIIcon.FILTER_CLIPBOARD)); } createButton(parent, IDialogConstants.OK_ID, IDialogConstants.OK_LABEL, true); createButton(parent, IDialogConstants.CANCEL_ID, IDialogConstants.CANCEL_LABEL, false); } @Override protected void buttonPressed(int buttonId) { if (buttonId == IDialogConstants.DETAILS_ID) { try { Object value = ResultSetUtils.getAttributeValueFromClipboard(handler.attr); handler.editor.primeEditorValue(value); } catch (DBException e) { DBUserInterface.getInstance().showError("Copy from clipboard", "Can't copy value", e); } } else { super.buttonPressed(buttonId); } } @Override protected void okPressed() { if (handler.table != null) { java.util.List<Object> values = new ArrayList<>(); for (DBDLabelValuePair item : handler.getMultiValues()) { if ( ((TableItem)handler.table.testFindItem(item)).getChecked()) { values.add(item.getValue()); } } value = values.toArray(); } else if (handler.editor != null) { try { value = handler.editor.extractEditorValue(); } catch (DBException e) { log.error("Can't get editor value", e); } } else { value = handler.textControl.getText(); } super.okPressed(); } }
RSV filter IN context menu fix (select all/none)
plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/controls/resultset/valuefilter/FilterValueEditDialog.java
RSV filter IN context menu fix (select all/none)
<ide><path>lugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/controls/resultset/valuefilter/FilterValueEditDialog.java <ide> new Action("Select &All") { <ide> @Override <ide> public void run() { <del> for (DBDLabelValuePair row : handler.getMultiValues()) { <del> ((CheckboxTableViewer) handler.table).setChecked(row, true); <del> } <add> for (TableItem item : handler.table.getTable().getItems()) { <add> item.setChecked(true); <add> } <ide> } <ide> }, <ide> new Action("Select &None") { <ide> @Override <ide> public void run() { <del> for (DBDLabelValuePair row : handler.getMultiValues()) { <del> ((CheckboxTableViewer) handler.table).setChecked(row, false); <del> } <add> for (TableItem item : handler.table.getTable().getItems()) { <add> item.setChecked(false); <add> } <ide> } <ide> } <ide> };
Java
apache-2.0
b47f01d670028b7b5d1b50b95ee4e258c2924283
0
robinverduijn/gradle,blindpirate/gradle,gradle/gradle,robinverduijn/gradle,blindpirate/gradle,lsmaira/gradle,gradle/gradle,gstevey/gradle,gradle/gradle,gstevey/gradle,lsmaira/gradle,robinverduijn/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,gstevey/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gstevey/gradle,blindpirate/gradle,lsmaira/gradle,gstevey/gradle,gradle/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gstevey/gradle,lsmaira/gradle,gstevey/gradle,blindpirate/gradle,lsmaira/gradle,gstevey/gradle,lsmaira/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,lsmaira/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,gstevey/gradle,lsmaira/gradle,lsmaira/gradle,gradle/gradle,lsmaira/gradle
/* * Copyright 2010 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.integtests.fixtures; import org.gradle.util.TestFile; import java.util.*; public class DaemonGradleExecuter extends ForkingGradleExecuter { public DaemonGradleExecuter(TestFile gradleHomeDir) { super(gradleHomeDir); } @Override protected List<String> getAllArgs() { List<String> args = new ArrayList<String>(); args.add("--daemon"); args.addAll(super.getAllArgs()); return args; } public GradleHandle<DaemonGradleExecuter> createHandle() { return new DaemonGradleHandle<DaemonGradleExecuter>(this); } protected static class DaemonGradleHandle<T extends DaemonGradleExecuter> extends ForkingGradleHandle<T> { public DaemonGradleHandle(T executer) { super(executer); } protected String transformStandardOutput(String output) { output = output.replace(String.format("Note: the Gradle build daemon is an experimental feature.%n"), ""); output = output.replace(String.format("As such, you may experience unexpected build failures. You may need to occasionally stop the daemon.%n"), ""); return output; } } }
subprojects/internal-integ-testing/src/main/groovy/org/gradle/integtests/fixtures/DaemonGradleExecuter.java
/* * Copyright 2010 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.integtests.fixtures; import org.gradle.util.TestFile; import java.io.File; import java.util.*; public class DaemonGradleExecuter extends ForkingGradleExecuter { public DaemonGradleExecuter(TestFile gradleHomeDir) { super(gradleHomeDir); } @Override protected List<String> getAllArgs() { List<String> args = new ArrayList<String>(); args.add("--daemon"); args.addAll(super.getAllArgs()); return args; } public GradleHandle<DaemonGradleExecuter> createHandle() { return new DaemonGradleHandle<DaemonGradleExecuter>(this); } protected static class DaemonGradleHandle<T extends DaemonGradleExecuter> extends ForkingGradleHandle<T> { public DaemonGradleHandle(T executer) { super(executer); } protected String transformStandardOutput(String output) { output = output.replace(String.format("Note: the Gradle build daemon is an experimental feature.%n"), ""); output = output.replace(String.format("As such, you may experience unexpected build failures. You may need to occasionally stop the daemon.%n"), ""); return output; } } }
unused import.
subprojects/internal-integ-testing/src/main/groovy/org/gradle/integtests/fixtures/DaemonGradleExecuter.java
unused import.
<ide><path>ubprojects/internal-integ-testing/src/main/groovy/org/gradle/integtests/fixtures/DaemonGradleExecuter.java <ide> <ide> import org.gradle.util.TestFile; <ide> <del>import java.io.File; <ide> import java.util.*; <ide> <ide> public class DaemonGradleExecuter extends ForkingGradleExecuter {
Java
epl-1.0
95beeb00dbe340654121d1e078dc046dfa15081f
0
mdht/mdht,vadimnehta/mdht,drbgfc/mdht,vadimnehta/mdht,mdht/mdht,vadimnehta/mdht,vadimnehta/mdht,mdht/mdht,drbgfc/mdht,drbgfc/mdht,drbgfc/mdht,vadimnehta/mdht,mdht/mdht,drbgfc/mdht,mdht/mdht,vadimnehta/mdht,drbgfc/mdht
package org.openhealthtools.mdht.uml.cda.dita.internal; import java.io.IOException; import org.apache.commons.io.FileUtils; import org.dita.dost.util.DitaUtil; import org.eclipse.core.resources.IWorkspace; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.IPath; import org.eclipse.swt.events.FocusEvent; import org.eclipse.swt.events.FocusListener; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Text; import org.eclipse.uml2.uml.Class; import org.eclipse.uml2.uml.Constraint; import org.eclipse.uml2.uml.Stereotype; import org.openhealthtools.mdht.uml.cda.core.util.CDAProfileUtil; import org.openhealthtools.mdht.uml.cda.core.util.ICDAProfileConstants; import org.openhealthtools.mdht.uml.cda.dita.DitaTransformerOptions; import org.openhealthtools.mdht.uml.cda.dita.TransformClassContent; import org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor; public class TextEditor implements ConstraintEditor { private Text text; private Constraint constraint; private Button closeErrorTextButton; private Text errorText; private boolean checkDita = false; public void setText(Text text) { this.text = text; this.text.addFocusListener(new FocusListener() { public void focusLost(FocusEvent e) { checkDita = true; } public void focusGained(FocusEvent e) { // Not needed } }); this.text.addModifyListener(new ModifyListener() { public void modifyText(ModifyEvent e) { handleChange(); } }); } /** * @return the value stored in the Dita Enabled constraint * true iff the constraint has a Boolean of true, false otherwise */ public boolean isDitaEnabled() { Boolean ditaEnabled = false; try { Stereotype stereotype = CDAProfileUtil.getAppliedCDAStereotype( constraint, ICDAProfileConstants.CONSTRAINT_VALIDATION); ditaEnabled = (Boolean) constraint.getValue(stereotype, ICDAProfileConstants.CONSTRAINT_DITA_ENABLED); } catch 
(IllegalArgumentException ex) { /* Swallow this */ } return ditaEnabled; } /* * (non-Javadoc) * * * @see org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor#setDitaEnabled(boolean) */ @Override public void setDitaEnabled(boolean isEnabled) { Stereotype stereotype = CDAProfileUtil.getAppliedCDAStereotype( constraint, ICDAProfileConstants.CONSTRAINT_VALIDATION); constraint.setValue(stereotype, ICDAProfileConstants.CONSTRAINT_DITA_ENABLED, isEnabled); } private void handleChange() { if (checkDita && isDitaEnabled()) { runHandleChange(); } } private void runHandleChange() { checkDita = false; IPath tmpFile = generateTempDita(); boolean errorOccured = false; try { DitaUtil.validate(tmpFile); } catch (Exception exception) { // Add UI here showError(exception.toString()); errorOccured = true; } finally { hideError(errorOccured); } // Delete the temporary folder try { FileUtils.deleteDirectory(tmpFile.toFile().getParentFile()); } catch (IOException ioEx) { ioEx.printStackTrace(); } } private void hideError(boolean errorOccured) { if (!errorOccured) { errorText.setVisible(false); closeErrorTextButton.setVisible(false); } } private void showError(String error) { errorText.setText(error); errorText.setVisible(true); closeErrorTextButton.setVisible(true); } /* * (non-Javadoc) * * @see org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor#setConstraint(org.eclipse.uml2.uml.Constraint) */ public void setConstraint(Constraint constraint) { boolean firstRun = this.constraint == null && constraint != null; this.constraint = constraint; this.checkDita = true; if (firstRun) { runHandleChange(); } else { handleChange(); } } private IPath generateTempDita() { IWorkspace workspace = ResourcesPlugin.getWorkspace(); IPath tmpFileInWorkspaceDir = workspace.getRoot().getLocation().append("tmp").append( constraint.getContext().getName()).addFileExtension("dita"); DitaTransformerOptions transformerOptions = new DitaTransformerOptions(); 
transformerOptions.setExampleDepth(0); TransformClassContent transformer = new TransformClassContent(transformerOptions); if (!tmpFileInWorkspaceDir.toFile().getParentFile().exists()) tmpFileInWorkspaceDir.toFile().getParentFile().mkdirs(); transformer.writeClassToFile((Class) constraint.getContext(), tmpFileInWorkspaceDir); return tmpFileInWorkspaceDir; } /* * (non-Javadoc) * * @see org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor#setErrorText(org.eclipse.swt.widgets.Text) */ @Override public void setErrorText(Text errorText) { this.errorText = errorText; } /* * (non-Javadoc) * * @see org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor#setCloseErrorText(org.eclipse.swt.widgets.Button) */ @Override public void setCloseErrorText(Button closeErrorTextButton) { this.closeErrorTextButton = closeErrorTextButton; } }
cda/plugins/org.openhealthtools.mdht.uml.cda.dita/src/org/openhealthtools/mdht/uml/cda/dita/internal/TextEditor.java
package org.openhealthtools.mdht.uml.cda.dita.internal; import java.io.IOException; import org.apache.commons.io.FileUtils; import org.dita.dost.util.DitaUtil; import org.eclipse.core.resources.IWorkspace; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.IPath; import org.eclipse.swt.events.FocusEvent; import org.eclipse.swt.events.FocusListener; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Text; import org.eclipse.uml2.uml.Class; import org.eclipse.uml2.uml.Constraint; import org.eclipse.uml2.uml.Stereotype; import org.openhealthtools.mdht.uml.cda.core.util.CDAProfileUtil; import org.openhealthtools.mdht.uml.cda.core.util.ICDAProfileConstants; import org.openhealthtools.mdht.uml.cda.dita.DitaTransformerOptions; import org.openhealthtools.mdht.uml.cda.dita.TransformClassContent; import org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor; public class TextEditor implements ConstraintEditor { private Text text; private Constraint constraint; private Button closeErrorTextButton; private Text errorText; private boolean checkDita = false; public void setText(Text text) { this.text = text; this.text.addFocusListener(new FocusListener() { public void focusLost(FocusEvent e) { checkDita = true; } public void focusGained(FocusEvent e) { // Not needed } }); this.text.addModifyListener(new ModifyListener() { public void modifyText(ModifyEvent e) { handleChange(); } }); } private boolean isDitaEnabled() { Boolean ditaEnabled = false; try { Stereotype stereotype = CDAProfileUtil.getAppliedCDAStereotype( constraint, ICDAProfileConstants.CONSTRAINT_VALIDATION); ditaEnabled = (Boolean) constraint.getValue(stereotype, ICDAProfileConstants.CONSTRAINT_DITA_ENABLED); } catch (IllegalArgumentException ex) { /* Swallow this */ } return ditaEnabled; } /* * (non-Javadoc) * * @see 
org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor#setDitaEnabled(boolean) */ @Override public void setDitaEnabled(boolean isEnabled) { Stereotype stereotype = CDAProfileUtil.getAppliedCDAStereotype( constraint, ICDAProfileConstants.CONSTRAINT_VALIDATION); constraint.setValue(stereotype, ICDAProfileConstants.CONSTRAINT_DITA_ENABLED, isEnabled); } private void handleChange() { if (checkDita && isDitaEnabled()) { runHandleChange(); } } private void runHandleChange() { checkDita = false; IPath tmpFile = generateTempDita(); boolean errorOccured = false; try { DitaUtil.validate(tmpFile); } catch (Exception exception) { // Add UI here showError(exception.toString()); errorOccured = true; } finally { hideError(errorOccured); } // Delete the temporary folder try { FileUtils.deleteDirectory(tmpFile.toFile().getParentFile()); } catch (IOException ioEx) { ioEx.printStackTrace(); } } private void hideError(boolean errorOccured) { if (!errorOccured) { errorText.setVisible(false); closeErrorTextButton.setVisible(false); } } private void showError(String error) { errorText.setText(error); errorText.setVisible(true); closeErrorTextButton.setVisible(true); } /* * (non-Javadoc) * * @see org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor#setConstraint(org.eclipse.uml2.uml.Constraint) */ public void setConstraint(Constraint constraint) { boolean firstRun = this.constraint == null && constraint != null; this.constraint = constraint; this.checkDita = true; if (firstRun) { runHandleChange(); } else { handleChange(); } } private IPath generateTempDita() { IWorkspace workspace = ResourcesPlugin.getWorkspace(); IPath tmpFileInWorkspaceDir = workspace.getRoot().getLocation().append("tmp").append( constraint.getContext().getName()).addFileExtension("dita"); DitaTransformerOptions transformerOptions = new DitaTransformerOptions(); transformerOptions.setExampleDepth(0); TransformClassContent transformer = new 
TransformClassContent(transformerOptions); if (!tmpFileInWorkspaceDir.toFile().getParentFile().exists()) tmpFileInWorkspaceDir.toFile().getParentFile().mkdirs(); transformer.writeClassToFile((Class) constraint.getContext(), tmpFileInWorkspaceDir); return tmpFileInWorkspaceDir; } /* * (non-Javadoc) * * @see org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor#setErrorText(org.eclipse.swt.widgets.Text) */ @Override public void setErrorText(Text errorText) { this.errorText = errorText; } /* * (non-Javadoc) * * @see org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor#setCloseErrorText(org.eclipse.swt.widgets.Button) */ @Override public void setCloseErrorText(Button closeErrorTextButton) { this.closeErrorTextButton = closeErrorTextButton; } }
fix crlf line endings
cda/plugins/org.openhealthtools.mdht.uml.cda.dita/src/org/openhealthtools/mdht/uml/cda/dita/internal/TextEditor.java
fix crlf line endings
<ide><path>da/plugins/org.openhealthtools.mdht.uml.cda.dita/src/org/openhealthtools/mdht/uml/cda/dita/internal/TextEditor.java <ide> }); <ide> } <ide> <del> private boolean isDitaEnabled() { <add> /** <add> * @return the value stored in the Dita Enabled constraint <add> * true iff the constraint has a Boolean of true, false otherwise <add> */ <add> public boolean isDitaEnabled() { <ide> Boolean ditaEnabled = false; <ide> try { <ide> Stereotype stereotype = CDAProfileUtil.getAppliedCDAStereotype( <ide> /* <ide> * (non-Javadoc) <ide> * <del> * @see org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor#setDitaEnabled(boolean) <add> * * @see org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor#setDitaEnabled(boolean) <ide> */ <ide> @Override <ide> public void setDitaEnabled(boolean isEnabled) { <ide> <ide> /* <ide> * (non-Javadoc) <del> * <add> * <ide> * @see org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor#setErrorText(org.eclipse.swt.widgets.Text) <ide> */ <ide> @Override <ide> <ide> /* <ide> * (non-Javadoc) <del> * <add> * <ide> * @see org.openhealthtools.mdht.uml.ui.properties.internal.sections.ConstraintEditor#setCloseErrorText(org.eclipse.swt.widgets.Button) <ide> */ <ide> @Override
Java
apache-2.0
e76eeb0d2951928ad7abd947122fedc65ae661fd
0
cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x
/* * Copyright 2002-2006 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.orm.jpa.vendor; import java.sql.Connection; import java.sql.SQLException; import javax.persistence.EntityManager; import javax.persistence.PersistenceException; import org.hibernate.FlushMode; import org.hibernate.Session; import org.hibernate.ejb.HibernateEntityManager; import org.springframework.jdbc.datasource.ConnectionHandle; import org.springframework.jdbc.datasource.SimpleConnectionHandle; import org.springframework.orm.jpa.DefaultJpaDialect; import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.TransactionException; /** * {@link org.springframework.orm.jpa.JpaDialect} implementation for * Hibernate EntityManager. Developed and tested against Hibernate 3.2. * * @author Costin Leau * @author Juergen Hoeller * @since 2.0 */ public class HibernateJpaDialect extends DefaultJpaDialect { public Object beginTransaction(EntityManager entityManager, TransactionDefinition definition) throws PersistenceException, SQLException, TransactionException { super.beginTransaction(entityManager, definition); Session session = getSession(entityManager); FlushMode flushMode = session.getFlushMode(); FlushMode previousFlushMode = null; if (definition.isReadOnly()) { // We should suppress flushing for a read-only transaction. 
session.setFlushMode(FlushMode.MANUAL); previousFlushMode = flushMode; } else { // We need AUTO or COMMIT for a non-read-only transaction. if (flushMode.lessThan(FlushMode.COMMIT)) { session.setFlushMode(FlushMode.AUTO); previousFlushMode = flushMode; } } return new SessionTransactionData(session, previousFlushMode); } public void cleanupTransaction(Object transactionData) { ((SessionTransactionData) transactionData).resetFlushMode(); } @Override public ConnectionHandle getJdbcConnection(EntityManager entityManager, boolean readOnly) throws PersistenceException, SQLException { Session session = getSession(entityManager); Connection con = session.connection(); return (con != null ? new SimpleConnectionHandle(con) : null); } protected Session getSession(EntityManager em) { return ((HibernateEntityManager) em).getSession(); } private static class SessionTransactionData { private final Session session; private final FlushMode previousFlushMode; public SessionTransactionData(Session session, FlushMode previousFlushMode) { this.session = session; this.previousFlushMode = previousFlushMode; } public void resetFlushMode() { if (this.previousFlushMode != null) { this.session.setFlushMode(this.previousFlushMode); } } } }
tiger/src/org/springframework/orm/jpa/vendor/HibernateJpaDialect.java
/* * Copyright 2002-2006 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.orm.jpa.vendor; import java.sql.Connection; import java.sql.SQLException; import javax.persistence.EntityManager; import javax.persistence.PersistenceException; import org.hibernate.Session; import org.hibernate.ejb.HibernateEntityManager; import org.springframework.jdbc.datasource.ConnectionHandle; import org.springframework.jdbc.datasource.SimpleConnectionHandle; import org.springframework.orm.jpa.DefaultJpaDialect; /** * {@link org.springframework.orm.jpa.JpaDialect} implementation for * Hibernate EntityManager. Developed and tested against Hibernate 3.2. * * @author Costin Leau * @author Juergen Hoeller * @since 2.0 */ public class HibernateJpaDialect extends DefaultJpaDialect { @Override public ConnectionHandle getJdbcConnection(EntityManager entityManager, boolean readOnly) throws PersistenceException, SQLException { Session session = getSession(entityManager); Connection con = session.connection(); return (con != null ? new SimpleConnectionHandle(con) : null); } protected Session getSession(EntityManager em) { return ((HibernateEntityManager) em).getSession(); } }
switch FlushMode to MANUAL in case of read-only, and enforce COMMIT or AUTO for read-write transaction git-svn-id: b619a0c99665f88f1afe72824344cefe9a1c8c90@12326 fd5a2b45-1f63-4059-99e9-3c7cb7fd75c8
tiger/src/org/springframework/orm/jpa/vendor/HibernateJpaDialect.java
switch FlushMode to MANUAL in case of read-only, and enforce COMMIT or AUTO for read-write transaction
<ide><path>iger/src/org/springframework/orm/jpa/vendor/HibernateJpaDialect.java <ide> import javax.persistence.EntityManager; <ide> import javax.persistence.PersistenceException; <ide> <add>import org.hibernate.FlushMode; <ide> import org.hibernate.Session; <ide> import org.hibernate.ejb.HibernateEntityManager; <ide> <ide> import org.springframework.jdbc.datasource.ConnectionHandle; <ide> import org.springframework.jdbc.datasource.SimpleConnectionHandle; <ide> import org.springframework.orm.jpa.DefaultJpaDialect; <add>import org.springframework.transaction.TransactionDefinition; <add>import org.springframework.transaction.TransactionException; <ide> <ide> /** <ide> * {@link org.springframework.orm.jpa.JpaDialect} implementation for <ide> * @since 2.0 <ide> */ <ide> public class HibernateJpaDialect extends DefaultJpaDialect { <add> <add> public Object beginTransaction(EntityManager entityManager, TransactionDefinition definition) <add> throws PersistenceException, SQLException, TransactionException { <add> <add> super.beginTransaction(entityManager, definition); <add> Session session = getSession(entityManager); <add> FlushMode flushMode = session.getFlushMode(); <add> FlushMode previousFlushMode = null; <add> if (definition.isReadOnly()) { <add> // We should suppress flushing for a read-only transaction. <add> session.setFlushMode(FlushMode.MANUAL); <add> previousFlushMode = flushMode; <add> } <add> else { <add> // We need AUTO or COMMIT for a non-read-only transaction. 
<add> if (flushMode.lessThan(FlushMode.COMMIT)) { <add> session.setFlushMode(FlushMode.AUTO); <add> previousFlushMode = flushMode; <add> } <add> } <add> return new SessionTransactionData(session, previousFlushMode); <add> } <add> <add> public void cleanupTransaction(Object transactionData) { <add> ((SessionTransactionData) transactionData).resetFlushMode(); <add> } <ide> <ide> @Override <ide> public ConnectionHandle getJdbcConnection(EntityManager entityManager, boolean readOnly) <ide> return ((HibernateEntityManager) em).getSession(); <ide> } <ide> <add> <add> private static class SessionTransactionData { <add> <add> private final Session session; <add> <add> private final FlushMode previousFlushMode; <add> <add> public SessionTransactionData(Session session, FlushMode previousFlushMode) { <add> this.session = session; <add> this.previousFlushMode = previousFlushMode; <add> } <add> <add> public void resetFlushMode() { <add> if (this.previousFlushMode != null) { <add> this.session.setFlushMode(this.previousFlushMode); <add> } <add> } <add> } <add> <ide> }
Java
apache-2.0
6125a005074facbb74234546d38637ac5ccd9766
0
mgormley/pacaya,mgormley/pacaya
package edu.jhu.srl; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.log4j.Logger; import edu.jhu.data.concrete.SimpleAnnoSentenceCollection; import edu.jhu.data.conll.CoNLL09FileReader; import edu.jhu.data.conll.CoNLL09Sentence; import edu.jhu.data.conll.SrlGraph; import edu.jhu.data.conll.SrlGraph.SrlEdge; import edu.jhu.featurize.SentFeatureExtractor; import edu.jhu.featurize.SentFeatureExtractor.SentFeatureExtractorPrm; import edu.jhu.gm.Feature; import edu.jhu.gm.FeatureExtractor; import edu.jhu.gm.FgExample; import edu.jhu.gm.FgExamples; import edu.jhu.gm.ProjDepTreeFactor.LinkVar; import edu.jhu.gm.Var.VarType; import edu.jhu.gm.VarConfig; import edu.jhu.srl.SrlFactorGraph.RoleVar; import edu.jhu.srl.SrlFactorGraph.SrlFactorGraphPrm; import edu.jhu.srl.SrlFeatureExtractor.SrlFeatureExtractorPrm; import edu.jhu.util.Alphabet; import edu.jhu.util.CountingAlphabet; /** * Factory for FgExamples. * * @author mgormley * @author mmitchell */ public class SrlFgExamplesBuilder { public static class SrlFgExampleBuilderPrm { /* These provide default values during testing; otherwise, * values should be defined by SrlRunner. */ public SrlFactorGraphPrm fgPrm = new SrlFactorGraphPrm(); public SentFeatureExtractorPrm fePrm = new SentFeatureExtractorPrm(); public SrlFeatureExtractorPrm srlFePrm = new SrlFeatureExtractorPrm(); /** Whether to include unsupported features. */ public boolean includeUnsupportedFeatures = false; /** * Minimum number of times (inclusive) a feature must occur in training * to be included in the model. Ignored if non-positive. (Using this * cutoff implies that unsupported features will not be included.) 
*/ public int featCountCutoff = -1; } private static final Logger log = Logger.getLogger(SrlFgExamplesBuilder.class); private SrlFgExampleBuilderPrm prm; private Alphabet<Feature> alphabet; private CorpusStatistics cs; public SrlFgExamplesBuilder(SrlFgExampleBuilderPrm prm, Alphabet<Feature> alphabet, CorpusStatistics cs) { this.prm = prm; this.alphabet = alphabet; this.cs = cs; } public FgExamples getData(SimpleAnnoSentenceCollection sents) { throw new RuntimeException("Not implemented"); } public FgExamples getData(CoNLL09FileReader reader) { List<CoNLL09Sentence> sents = reader.readAll(); return getData(sents); } public void preprocess(List<CoNLL09Sentence> sents) { if (!(alphabet.isGrowing() && prm.featCountCutoff > 0)) { // Skip this preprocessing step since it will have no effect. return; } CountingAlphabet<Feature> counter = new CountingAlphabet<Feature>(); Alphabet<String> obsAlphabet = new Alphabet<String>(); List<FeatureExtractor> featExts = new ArrayList<FeatureExtractor>(); for (int i=0; i<sents.size(); i++) { CoNLL09Sentence sent = sents.get(i); if (i % 1000 == 0 && i > 0) { log.debug("Preprocessed " + i + " examples..."); } // Precompute a few things. SrlGraph srlGraph = sent.getSrlGraph(); Set<Integer> knownPreds = getKnownPreds(srlGraph); // Construct the factor graph. SrlFactorGraph sfg = new SrlFactorGraph(prm.fgPrm, sent.size(), knownPreds, cs.roleStateNames); // Get the variable assignments given in the training data. VarConfig trainConfig = getTrainAssignment(sent, srlGraph, sfg); FgExample ex = new FgExample(sfg, trainConfig); // Create a feature extractor for this example. SentFeatureExtractor sentFeatExt = new SentFeatureExtractor(prm.fePrm, sent, cs, obsAlphabet); FeatureExtractor featExtractor = new SrlFeatureExtractor(prm.srlFePrm, sfg, counter, sentFeatExt); // So we don't have to compute the features again for this example. featExts.add(featExtractor); // Cache only the features observed in training data. 
ex.cacheLatFeats(sfg, trainConfig, featExtractor); } for (int i=0; i<counter.size(); i++) { int count = counter.lookupObjectCount(i); Feature feat = counter.lookupObject(i); if (count >= prm.featCountCutoff || feat.isBiasFeature()) { alphabet.lookupIndex(feat); } } alphabet.stopGrowth(); } public FgExamples getData(List<CoNLL09Sentence> sents) { preprocess(sents); Alphabet<String> obsAlphabet = new Alphabet<String>(); List<FeatureExtractor> featExts = new ArrayList<FeatureExtractor>(); FgExamples data = new FgExamples(alphabet); for (int i=0; i<sents.size(); i++) { CoNLL09Sentence sent = sents.get(i); if (i % 1000 == 0 && i > 0) { log.debug("Built " + i + " examples..."); } // Precompute a few things. SrlGraph srlGraph = sent.getSrlGraph(); Set<Integer> knownPreds = getKnownPreds(srlGraph); // Construct the factor graph. SrlFactorGraph sfg = new SrlFactorGraph(prm.fgPrm, sent.size(), knownPreds, cs.roleStateNames); // Get the variable assignments given in the training data. VarConfig trainConfig = getTrainAssignment(sent, srlGraph, sfg); FgExample ex = new FgExample(sfg, trainConfig); // Create a feature extractor for this example. SentFeatureExtractor sentFeatExt = new SentFeatureExtractor(prm.fePrm, sent, cs, obsAlphabet); FeatureExtractor featExtractor = new SrlFeatureExtractor(prm.srlFePrm, sfg, alphabet, sentFeatExt); // So we don't have to compute the features again for this example. featExts.add(featExtractor); // Cache only the features observed in training data. ex.cacheLatFeats(sfg, trainConfig, featExtractor); data.add(ex); } if (!prm.includeUnsupportedFeatures) { alphabet.stopGrowth(); } // Cache features for all the other variable assignments. 
for (int i=0; i<data.size(); i++) { if (i % 1000 == 0 && i > 0) { log.debug("Cached features for " + i + " examples..."); } FgExample ex = data.get(i); CoNLL09Sentence sent = sents.get(i); SrlGraph srlGraph = sent.getSrlGraph(); SrlFactorGraph sfg = (SrlFactorGraph) ex.getOriginalFactorGraph(); VarConfig trainConfig = getTrainAssignment(sent, srlGraph, sfg); FeatureExtractor featExtractor = featExts.get(i); ex.cacheLatPredFeats(sfg, trainConfig, featExtractor); } log.info("Num observation functions: " + obsAlphabet.size()); data.setSourceSentences(sents); return data; } private static Set<Integer> getKnownPreds(SrlGraph srlGraph) { List<SrlEdge> srlEdges = srlGraph.getEdges(); Set<Integer> knownPreds = new HashSet<Integer>(); // All the "Y"s for (SrlEdge e : srlEdges) { Integer a = e.getPred().getPosition(); knownPreds.add(a); } return knownPreds; } private VarConfig getTrainAssignment(CoNLL09Sentence sent, SrlGraph srlGraph, SrlFactorGraph sfg) { VarConfig vc = new VarConfig(); // Add all the training data assignments to the link variables, if they are not latent. // // IMPORTANT NOTE: We include the case where the parent is the Wall node (position -1). int[] parents = cs.prm.useGoldSyntax ? sent.getParentsFromHead() : sent.getParentsFromPhead(); for (int i=-1; i<sent.size(); i++) { for (int j=0; j<sent.size(); j++) { if (j != i && sfg.getLinkVar(i, j) != null) { LinkVar linkVar = sfg.getLinkVar(i, j); if (linkVar.getType() != VarType.LATENT) { // Syntactic head, from dependency parse. int state; if (parents[j] != i) { state = LinkVar.FALSE; } else { state = LinkVar.TRUE; } vc.put(linkVar, state); } } } } // Add all the training data assignments to the role variables, if they are not latent. // First, just set all the role names to "_". 
for (int i=0; i<sent.size(); i++) { for (int j=0; j<sent.size(); j++) { RoleVar roleVar = sfg.getRoleVar(i, j); if (roleVar != null && roleVar.getType() != VarType.LATENT) { vc.put(roleVar, "_"); } } } // Then set the ones which are observed. for (SrlEdge edge : srlGraph.getEdges()) { int parent = edge.getPred().getPosition(); int child = edge.getArg().getPosition(); String roleName = edge.getLabel(); RoleVar roleVar = sfg.getRoleVar(parent, child); if (roleVar != null && roleVar.getType() != VarType.LATENT) { int roleNameIdx = roleVar.getState(roleName); // TODO: This isn't quite right...we should really store the actual role name here. if (roleNameIdx == -1) { vc.put(roleVar, CorpusStatistics.UNKNOWN_ROLE); } else { vc.put(roleVar, roleNameIdx); } } } return vc; } }
src/main/java/edu/jhu/srl/SrlFgExamplesBuilder.java
package edu.jhu.srl; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.log4j.Logger; import edu.jhu.data.concrete.SimpleAnnoSentenceCollection; import edu.jhu.data.conll.CoNLL09FileReader; import edu.jhu.data.conll.CoNLL09Sentence; import edu.jhu.data.conll.SrlGraph; import edu.jhu.data.conll.SrlGraph.SrlEdge; import edu.jhu.featurize.SentFeatureExtractor; import edu.jhu.featurize.SentFeatureExtractor.SentFeatureExtractorPrm; import edu.jhu.gm.Feature; import edu.jhu.gm.FeatureExtractor; import edu.jhu.gm.FgExample; import edu.jhu.gm.FgExamples; import edu.jhu.gm.ProjDepTreeFactor.LinkVar; import edu.jhu.gm.Var.VarType; import edu.jhu.gm.VarConfig; import edu.jhu.srl.SrlFactorGraph.RoleVar; import edu.jhu.srl.SrlFactorGraph.SrlFactorGraphPrm; import edu.jhu.srl.SrlFeatureExtractor.SrlFeatureExtractorPrm; import edu.jhu.util.Alphabet; import edu.jhu.util.CountingAlphabet; /** * Factory for FgExamples. * * @author mgormley * @author mmitchell */ public class SrlFgExamplesBuilder { public static class SrlFgExampleBuilderPrm { /* These provide default values during testing; otherwise, * values should be defined by SrlRunner. */ public SrlFactorGraphPrm fgPrm = new SrlFactorGraphPrm(); public SentFeatureExtractorPrm fePrm = new SentFeatureExtractorPrm(); public SrlFeatureExtractorPrm srlFePrm = new SrlFeatureExtractorPrm(); /** Whether to include unsupported features. */ public boolean includeUnsupportedFeatures = false; /** * Minimum number of times (inclusive) a feature must occur in training * to be included in the model. Ignored if non-positive. (Using this * cutoff implies that unsupported features will not be included.) 
*/ public int featCountCutoff = -1; } private static final Logger log = Logger.getLogger(SrlFgExamplesBuilder.class); private SrlFgExampleBuilderPrm prm; private Alphabet<Feature> alphabet; private CorpusStatistics cs; public SrlFgExamplesBuilder(SrlFgExampleBuilderPrm prm, Alphabet<Feature> alphabet, CorpusStatistics cs) { this.prm = prm; this.alphabet = alphabet; this.cs = cs; } public FgExamples getData(SimpleAnnoSentenceCollection sents) { throw new RuntimeException("Not implemented"); } public FgExamples getData(CoNLL09FileReader reader) { List<CoNLL09Sentence> sents = reader.readAll(); return getData(sents); } public void preprocess(List<CoNLL09Sentence> sents) { if (!(alphabet.isGrowing() && prm.featCountCutoff > 0)) { // Skip this preprocessing step since it will have no effect. return; } CountingAlphabet<Feature> counter = new CountingAlphabet<Feature>(); Alphabet<String> obsAlphabet = new Alphabet<String>(); List<FeatureExtractor> featExts = new ArrayList<FeatureExtractor>(); for (int i=0; i<sents.size(); i++) { CoNLL09Sentence sent = sents.get(i); if (i % 1000 == 0 && i > 0) { log.debug("Built " + i + " examples..."); } // Precompute a few things. SrlGraph srlGraph = sent.getSrlGraph(); Set<Integer> knownPreds = getKnownPreds(srlGraph); // Construct the factor graph. SrlFactorGraph sfg = new SrlFactorGraph(prm.fgPrm, sent.size(), knownPreds, cs.roleStateNames); // Get the variable assignments given in the training data. VarConfig trainConfig = getTrainAssignment(sent, srlGraph, sfg); FgExample ex = new FgExample(sfg, trainConfig); // Create a feature extractor for this example. SentFeatureExtractor sentFeatExt = new SentFeatureExtractor(prm.fePrm, sent, cs, obsAlphabet); FeatureExtractor featExtractor = new SrlFeatureExtractor(prm.srlFePrm, sfg, counter, sentFeatExt); // So we don't have to compute the features again for this example. featExts.add(featExtractor); // Cache only the features observed in training data. 
ex.cacheLatFeats(sfg, trainConfig, featExtractor); } for (int i=0; i<counter.size(); i++) { int count = counter.lookupObjectCount(i); Feature feat = counter.lookupObject(i); if (count >= prm.featCountCutoff || feat.isBiasFeature()) { alphabet.lookupIndex(feat); } } alphabet.stopGrowth(); } public FgExamples getData(List<CoNLL09Sentence> sents) { preprocess(sents); Alphabet<String> obsAlphabet = new Alphabet<String>(); List<FeatureExtractor> featExts = new ArrayList<FeatureExtractor>(); FgExamples data = new FgExamples(alphabet); for (int i=0; i<sents.size(); i++) { CoNLL09Sentence sent = sents.get(i); if (i % 1000 == 0 && i > 0) { log.debug("Built " + i + " examples..."); } // Precompute a few things. SrlGraph srlGraph = sent.getSrlGraph(); Set<Integer> knownPreds = getKnownPreds(srlGraph); // Construct the factor graph. SrlFactorGraph sfg = new SrlFactorGraph(prm.fgPrm, sent.size(), knownPreds, cs.roleStateNames); // Get the variable assignments given in the training data. VarConfig trainConfig = getTrainAssignment(sent, srlGraph, sfg); FgExample ex = new FgExample(sfg, trainConfig); // Create a feature extractor for this example. SentFeatureExtractor sentFeatExt = new SentFeatureExtractor(prm.fePrm, sent, cs, obsAlphabet); FeatureExtractor featExtractor = new SrlFeatureExtractor(prm.srlFePrm, sfg, alphabet, sentFeatExt); // So we don't have to compute the features again for this example. featExts.add(featExtractor); // Cache only the features observed in training data. ex.cacheLatFeats(sfg, trainConfig, featExtractor); data.add(ex); } if (!prm.includeUnsupportedFeatures) { alphabet.stopGrowth(); } // Cache features for all the other variable assignments. 
for (int i=0; i<data.size(); i++) { if (i % 1000 == 0 && i > 0) { log.debug("Cached features for " + i + " examples..."); } FgExample ex = data.get(i); CoNLL09Sentence sent = sents.get(i); SrlGraph srlGraph = sent.getSrlGraph(); SrlFactorGraph sfg = (SrlFactorGraph) ex.getOriginalFactorGraph(); VarConfig trainConfig = getTrainAssignment(sent, srlGraph, sfg); FeatureExtractor featExtractor = featExts.get(i); ex.cacheLatPredFeats(sfg, trainConfig, featExtractor); } log.info("Num observation functions: " + obsAlphabet.size()); data.setSourceSentences(sents); return data; } private static Set<Integer> getKnownPreds(SrlGraph srlGraph) { List<SrlEdge> srlEdges = srlGraph.getEdges(); Set<Integer> knownPreds = new HashSet<Integer>(); // All the "Y"s for (SrlEdge e : srlEdges) { Integer a = e.getPred().getPosition(); knownPreds.add(a); } return knownPreds; } private VarConfig getTrainAssignment(CoNLL09Sentence sent, SrlGraph srlGraph, SrlFactorGraph sfg) { VarConfig vc = new VarConfig(); // Add all the training data assignments to the link variables, if they are not latent. // // IMPORTANT NOTE: We include the case where the parent is the Wall node (position -1). int[] parents = cs.prm.useGoldSyntax ? sent.getParentsFromHead() : sent.getParentsFromPhead(); for (int i=-1; i<sent.size(); i++) { for (int j=0; j<sent.size(); j++) { if (j != i && sfg.getLinkVar(i, j) != null) { LinkVar linkVar = sfg.getLinkVar(i, j); if (linkVar.getType() != VarType.LATENT) { // Syntactic head, from dependency parse. int state; if (parents[j] != i) { state = LinkVar.FALSE; } else { state = LinkVar.TRUE; } vc.put(linkVar, state); } } } } // Add all the training data assignments to the role variables, if they are not latent. // First, just set all the role names to "_". 
for (int i=0; i<sent.size(); i++) { for (int j=0; j<sent.size(); j++) { RoleVar roleVar = sfg.getRoleVar(i, j); if (roleVar != null && roleVar.getType() != VarType.LATENT) { vc.put(roleVar, "_"); } } } // Then set the ones which are observed. for (SrlEdge edge : srlGraph.getEdges()) { int parent = edge.getPred().getPosition(); int child = edge.getArg().getPosition(); String roleName = edge.getLabel(); RoleVar roleVar = sfg.getRoleVar(parent, child); if (roleVar != null && roleVar.getType() != VarType.LATENT) { int roleNameIdx = roleVar.getState(roleName); // TODO: This isn't quite right...we should really store the actual role name here. if (roleNameIdx == -1) { vc.put(roleVar, CorpusStatistics.UNKNOWN_ROLE); } else { vc.put(roleVar, roleNameIdx); } } } return vc; } }
Editing log message for preprocessing.
src/main/java/edu/jhu/srl/SrlFgExamplesBuilder.java
Editing log message for preprocessing.
<ide><path>rc/main/java/edu/jhu/srl/SrlFgExamplesBuilder.java <ide> for (int i=0; i<sents.size(); i++) { <ide> CoNLL09Sentence sent = sents.get(i); <ide> if (i % 1000 == 0 && i > 0) { <del> log.debug("Built " + i + " examples..."); <add> log.debug("Preprocessed " + i + " examples..."); <ide> } <ide> <ide> // Precompute a few things.
Java
apache-2.0
668cc93e6c08e6ecc9569e39946dcdcadca4e6a2
0
jhwhetstone/cdsWebserver,jhwhetstone/cdsWebserver,jhwhetstone/cdsWebserver,jhwhetstone/cdsWebserver
package org.pesc.config; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.config.http.SessionCreationPolicy; /** * Created by james on 2/17/16. */ @Configuration @EnableWebSecurity public class WebSecurityConfig extends WebSecurityConfigurerAdapter { @Override protected void configure(HttpSecurity http) throws Exception { http .csrf() .disable() .sessionManagement() .sessionCreationPolicy(SessionCreationPolicy.NEVER) .enableSessionUrlRewriting(false) .and() .authorizeRequests() .antMatchers("/static/**","/fonts/**", "/home", "/organizations", "/organization-details", "/home.html", "/", "/services/**").permitAll() .anyRequest().authenticated() .and() .formLogin() .loginPage("/login") .permitAll() .and() .logout() .permitAll(); http.httpBasic(); } @Autowired public void configureGlobal(AuthenticationManagerBuilder auth) throws Exception { auth .inMemoryAuthentication() .withUser("admin").password("password").roles("SYSTEM_ADMIN"); } }
directoryServer/src/main/java/org/pesc/config/WebSecurityConfig.java
package org.pesc.config; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.config.http.SessionCreationPolicy; /** * Created by james on 2/17/16. */ @Configuration @EnableWebSecurity public class WebSecurityConfig extends WebSecurityConfigurerAdapter { @Override protected void configure(HttpSecurity http) throws Exception { http .csrf() .disable() .sessionManagement() .sessionCreationPolicy(SessionCreationPolicy.NEVER) .enableSessionUrlRewriting(false) .and() .authorizeRequests() .antMatchers("/static/**","/fonts/**", "/home", "/organizations", "/home.html", "/", "/services/**").permitAll() .anyRequest().authenticated() .and() .formLogin() .loginPage("/login") .permitAll() .and() .logout() .permitAll(); http.httpBasic(); } @Autowired public void configureGlobal(AuthenticationManagerBuilder auth) throws Exception { auth .inMemoryAuthentication() .withUser("admin").password("password").roles("SYSTEM_ADMIN"); } }
Updated security settings to allow public access to organization details view.
directoryServer/src/main/java/org/pesc/config/WebSecurityConfig.java
Updated security settings to allow public access to organization details view.
<ide><path>irectoryServer/src/main/java/org/pesc/config/WebSecurityConfig.java <ide> .enableSessionUrlRewriting(false) <ide> .and() <ide> .authorizeRequests() <del> .antMatchers("/static/**","/fonts/**", "/home", "/organizations", "/home.html", "/", "/services/**").permitAll() <add> .antMatchers("/static/**","/fonts/**", "/home", "/organizations", "/organization-details", "/home.html", "/", "/services/**").permitAll() <ide> .anyRequest().authenticated() <ide> .and() <ide> .formLogin()
Java
mit
c8d03d0362cf72db81f03b4984225dce2946882c
0
JEEventStore/JEECQRS
/* * Copyright (c) 2013 Red Rainbow IT Solutions GmbH, Germany * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package org.jeecqrs.sagas.handler.local; import java.util.Set; import java.util.logging.Level; import org.jeecqrs.sagas.handler.SagaService; import java.util.logging.Logger; import javax.annotation.PostConstruct; import javax.ejb.EJB; import org.jeecqrs.event.EventBusListener; import org.jeecqrs.event.EventBusListenerRegistry; import org.jeecqrs.event.EventInterest; import org.jeecqrs.event.registry.AbstractEventBusListenerRegistry; import org.jeecqrs.sagas.Saga; import org.jeecqrs.sagas.SagaConfig; import org.jeecqrs.sagas.SagaConfigResolver; import org.jeecqrs.sagas.SagaRegistry; /** * */ public class RegisterSagaHandlersEventBusListenerRegistry<E> extends AbstractEventBusListenerRegistry<E> { private final Logger log =Logger.getLogger( RegisterSagaHandlersEventBusListenerRegistry.class.getName()); @EJB(name="sagaRegistry") private SagaRegistry<E> sagaRegistry; @EJB(name="sagaService") private SagaService sagaService; @EJB(name="sagaConfigResolver") private SagaConfigResolver<E> sagaConfigResolver; @PostConstruct public void startup() { log.info("Registering event listeners for sagas"); Set<Class<? extends Saga<E>>> sagas = sagaRegistry.allSagas(); if (sagas.isEmpty()) log.info("No sagas found"); else registerAll(sagas); } protected void registerAll(Set<Class<? extends Saga<E>>> sagas) { for (Class<? extends Saga<E>> sagaClass : sagas) register(sagaClass); } protected void register(Class<? extends Saga<E>> sagaClass) { SagaConfig<? extends Saga<E>, E> config = sagaConfigResolver.configure(sagaClass); log.log(Level.INFO, "Registering {0} for {1}", new Object[]{sagaClass.getSimpleName(), buildEventLogString(config.interestedInEvents())}); this.register(new SagaEventBusListener(sagaClass, config, sagaService)); } private String buildEventLogString(EventInterest<E> interest) { StringBuilder builder = new StringBuilder(); builder.append("["); int c = 0; for (Class<? 
extends E> cls : interest.interestEventTypes()) { if (c++ > 0) builder.append(", "); builder.append(cls.getSimpleName()); } builder.append("]"); return builder.toString(); } }
core/src/main/java/org/jeecqrs/sagas/handler/local/RegisterSagaHandlersEventBusListenerRegistry.java
/* * Copyright (c) 2013 Red Rainbow IT Solutions GmbH, Germany * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package org.jeecqrs.sagas.handler.local; import java.util.Set; import java.util.logging.Level; import org.jeecqrs.sagas.handler.SagaService; import java.util.logging.Logger; import javax.annotation.PostConstruct; import javax.ejb.EJB; import org.jeecqrs.event.EventBusListener; import org.jeecqrs.event.EventBusListenerRegistry; import org.jeecqrs.event.EventInterest; import org.jeecqrs.event.registry.AbstractEventBusListenerRegistry; import org.jeecqrs.sagas.Saga; import org.jeecqrs.sagas.SagaConfig; import org.jeecqrs.sagas.SagaConfigResolver; import org.jeecqrs.sagas.SagaRegistry; /** * */ public class RegisterSagaHandlersEventBusListenerRegistry<E> extends AbstractEventBusListenerRegistry<E> { private final Logger log =Logger.getLogger( RegisterSagaHandlersEventBusListenerRegistry.class.getName()); @EJB(name="listenerRegistry") private EventBusListenerRegistry<E> delegateRegistry; @EJB(name="sagaRegistry") private SagaRegistry<E> sagaRegistry; @EJB(name="sagaService") private SagaService sagaService; @EJB(name="sagaConfigResolver") private SagaConfigResolver<E> sagaConfigResolver; @PostConstruct public void startup() { log.fine("Registering event listeners from delegate registry"); for (EventBusListener<E> ebl : delegateRegistry.allListeners()) this.register(ebl); log.info("Registering event listeners for sagas"); Set<Class<? extends Saga<E>>> sagas = sagaRegistry.allSagas(); if (sagas.isEmpty()) log.info("No sagas found"); else registerAll(sagas); } protected void registerAll(Set<Class<? extends Saga<E>>> sagas) { for (Class<? extends Saga<E>> sagaClass : sagas) register(sagaClass); } protected void register(Class<? extends Saga<E>> sagaClass) { SagaConfig<? 
extends Saga<E>, E> config = sagaConfigResolver.configure(sagaClass); log.log(Level.INFO, "Registering {0} for {1}", new Object[]{sagaClass.getSimpleName(), buildEventLogString(config.interestedInEvents())}); this.register(new SagaEventBusListener(sagaClass, config, sagaService)); } private String buildEventLogString(EventInterest<E> interest) { StringBuilder builder = new StringBuilder(); builder.append("["); int c = 0; for (Class<? extends E> cls : interest.interestEventTypes()) { if (c++ > 0) builder.append(", "); builder.append(cls.getSimpleName()); } builder.append("]"); return builder.toString(); } }
Remove delegate listener registry, not longer needed
core/src/main/java/org/jeecqrs/sagas/handler/local/RegisterSagaHandlersEventBusListenerRegistry.java
Remove delegate listener registry, not longer needed
<ide><path>ore/src/main/java/org/jeecqrs/sagas/handler/local/RegisterSagaHandlersEventBusListenerRegistry.java <ide> private final Logger log =Logger.getLogger( <ide> RegisterSagaHandlersEventBusListenerRegistry.class.getName()); <ide> <del> @EJB(name="listenerRegistry") <del> private EventBusListenerRegistry<E> delegateRegistry; <del> <ide> @EJB(name="sagaRegistry") <ide> private SagaRegistry<E> sagaRegistry; <ide> <ide> <ide> @PostConstruct <ide> public void startup() { <del> log.fine("Registering event listeners from delegate registry"); <del> for (EventBusListener<E> ebl : delegateRegistry.allListeners()) <del> this.register(ebl); <ide> log.info("Registering event listeners for sagas"); <ide> Set<Class<? extends Saga<E>>> sagas = sagaRegistry.allSagas(); <ide> if (sagas.isEmpty())
Java
apache-2.0
129a3ff7de3da53bb8b6732b31f51d416ea802be
0
electrum/presto,smartnews/presto,Praveen2112/presto,Praveen2112/presto,Praveen2112/presto,dain/presto,erichwang/presto,losipiuk/presto,electrum/presto,erichwang/presto,electrum/presto,11xor6/presto,losipiuk/presto,ebyhr/presto,smartnews/presto,Praveen2112/presto,erichwang/presto,ebyhr/presto,ebyhr/presto,smartnews/presto,dain/presto,Praveen2112/presto,smartnews/presto,11xor6/presto,losipiuk/presto,11xor6/presto,losipiuk/presto,losipiuk/presto,dain/presto,electrum/presto,electrum/presto,smartnews/presto,ebyhr/presto,dain/presto,11xor6/presto,dain/presto,ebyhr/presto,erichwang/presto,erichwang/presto,11xor6/presto
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.tests.iceberg; import io.prestosql.tempto.AfterTestWithContext; import io.prestosql.tempto.BeforeTestWithContext; import io.prestosql.tempto.ProductTest; import io.prestosql.testng.services.Flaky; import org.testng.annotations.Test; import static io.prestosql.tempto.assertions.QueryAssert.Row.row; import static io.prestosql.tempto.assertions.QueryAssert.assertThat; import static io.prestosql.tests.TestGroups.ICEBERG; import static io.prestosql.tests.TestGroups.STORAGE_FORMATS; import static io.prestosql.tests.hive.util.TemporaryHiveTable.randomTableSuffix; import static io.prestosql.tests.utils.QueryExecutors.onPresto; public class TestIcebergCreateTable extends ProductTest { @BeforeTestWithContext public void setUp() { onPresto().executeQuery("CREATE SCHEMA iceberg.iceberg"); } @AfterTestWithContext public void cleanUp() { onPresto().executeQuery("DROP SCHEMA iceberg.iceberg"); } @Test(groups = {ICEBERG, STORAGE_FORMATS}) @Flaky(issue = "https://github.com/prestosql/presto/issues/4864", match = "Failed to read footer of file") public void testCreateTable() { String tableName = "iceberg.iceberg.test_create_table_" + randomTableSuffix(); onPresto().executeQuery("CREATE TABLE " + tableName + "(a bigint, b varchar)"); onPresto().executeQuery("INSERT INTO " + tableName + "(a, b) VALUES " + "(NULL, NULL), " + "(-42, 'abc'), " + "(9223372036854775807, 'abcdefghijklmnopqrstuvwxyz')"); 
assertThat(onPresto().executeQuery("SELECT * FROM " + tableName)) .containsOnly( row(null, null), row(-42, "abc"), row(9223372036854775807L, "abcdefghijklmnopqrstuvwxyz")); onPresto().executeQuery("DROP TABLE " + tableName); } @Test(groups = {ICEBERG, STORAGE_FORMATS}) @Flaky(issue = "https://github.com/prestosql/presto/issues/4864", match = "Failed to read footer of file") public void testCreateTableAsSelect() { String tableName = "iceberg.iceberg.test_create_table_as_select_" + randomTableSuffix(); onPresto().executeQuery("" + "CREATE TABLE " + tableName + " AS " + "SELECT * FROM (VALUES " + " (NULL, NULL), " + " (-42, 'abc'), " + " (9223372036854775807, 'abcdefghijklmnopqrstuvwxyz')" + ") t(a, b)"); assertThat(onPresto().executeQuery("SELECT * FROM " + tableName)) .containsOnly( row(null, null), row(-42, "abc"), row(9223372036854775807L, "abcdefghijklmnopqrstuvwxyz")); onPresto().executeQuery("DROP TABLE " + tableName); } }
presto-product-tests/src/main/java/io/prestosql/tests/iceberg/TestIcebergCreateTable.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.tests.iceberg; import io.prestosql.tempto.AfterTestWithContext; import io.prestosql.tempto.BeforeTestWithContext; import io.prestosql.tempto.ProductTest; import io.prestosql.tempto.query.QueryExecutor; import io.prestosql.testng.services.Flaky; import org.testng.annotations.Test; import static io.prestosql.tempto.assertions.QueryAssert.Row.row; import static io.prestosql.tempto.assertions.QueryAssert.assertThat; import static io.prestosql.tests.TestGroups.ICEBERG; import static io.prestosql.tests.TestGroups.STORAGE_FORMATS; import static io.prestosql.tests.hive.util.TemporaryHiveTable.randomTableSuffix; import static io.prestosql.tests.utils.QueryExecutors.onPresto; public class TestIcebergCreateTable extends ProductTest { @BeforeTestWithContext public void setUp() { onPresto().executeQuery("CREATE SCHEMA iceberg.iceberg"); } @AfterTestWithContext public void cleanUp() { onPresto().executeQuery("DROP SCHEMA iceberg.iceberg"); } @Test(groups = {ICEBERG, STORAGE_FORMATS}) @Flaky(issue = "https://github.com/prestosql/presto/issues/4864", match = "Failed to read footer of file") public void testCreateTable() { String tableName = "test_create_table_" + randomTableSuffix(); QueryExecutor queryExecutor = onPresto(); queryExecutor.executeQuery("use iceberg.iceberg"); queryExecutor.executeQuery("CREATE TABLE " + tableName + "(a bigint, b varchar)"); queryExecutor.executeQuery("INSERT INTO " + tableName + "(a, b) 
VALUES " + "(NULL, NULL), " + "(-42, 'abc'), " + "(9223372036854775807, 'abcdefghijklmnopqrstuvwxyz')"); assertThat(queryExecutor.executeQuery("SELECT * FROM " + tableName)) .containsOnly( row(null, null), row(-42, "abc"), row(9223372036854775807L, "abcdefghijklmnopqrstuvwxyz")); queryExecutor.executeQuery("DROP TABLE " + tableName); } @Test(groups = {ICEBERG, STORAGE_FORMATS}) @Flaky(issue = "https://github.com/prestosql/presto/issues/4864", match = "Failed to read footer of file") public void testCreateTableAsSelect() { String tableName = "test_create_table_as_select_" + randomTableSuffix(); QueryExecutor queryExecutor = onPresto(); queryExecutor.executeQuery("use iceberg.iceberg"); queryExecutor.executeQuery("" + "CREATE TABLE " + tableName + " AS " + "SELECT * FROM (VALUES " + " (NULL, NULL), " + " (-42, 'abc'), " + " (9223372036854775807, 'abcdefghijklmnopqrstuvwxyz')" + ") t(a, b)"); assertThat(queryExecutor.executeQuery("SELECT * FROM " + tableName)) .containsOnly( row(null, null), row(-42, "abc"), row(9223372036854775807L, "abcdefghijklmnopqrstuvwxyz")); queryExecutor.executeQuery("DROP TABLE " + tableName); } }
Simplify QueryExecutor usage in TestIcebergCreateTable
presto-product-tests/src/main/java/io/prestosql/tests/iceberg/TestIcebergCreateTable.java
Simplify QueryExecutor usage in TestIcebergCreateTable
<ide><path>resto-product-tests/src/main/java/io/prestosql/tests/iceberg/TestIcebergCreateTable.java <ide> import io.prestosql.tempto.AfterTestWithContext; <ide> import io.prestosql.tempto.BeforeTestWithContext; <ide> import io.prestosql.tempto.ProductTest; <del>import io.prestosql.tempto.query.QueryExecutor; <ide> import io.prestosql.testng.services.Flaky; <ide> import org.testng.annotations.Test; <ide> <ide> @Flaky(issue = "https://github.com/prestosql/presto/issues/4864", match = "Failed to read footer of file") <ide> public void testCreateTable() <ide> { <del> String tableName = "test_create_table_" + randomTableSuffix(); <del> QueryExecutor queryExecutor = onPresto(); <del> queryExecutor.executeQuery("use iceberg.iceberg"); <del> queryExecutor.executeQuery("CREATE TABLE " + tableName + "(a bigint, b varchar)"); <del> queryExecutor.executeQuery("INSERT INTO " + tableName + "(a, b) VALUES " + <add> String tableName = "iceberg.iceberg.test_create_table_" + randomTableSuffix(); <add> onPresto().executeQuery("CREATE TABLE " + tableName + "(a bigint, b varchar)"); <add> onPresto().executeQuery("INSERT INTO " + tableName + "(a, b) VALUES " + <ide> "(NULL, NULL), " + <ide> "(-42, 'abc'), " + <ide> "(9223372036854775807, 'abcdefghijklmnopqrstuvwxyz')"); <del> assertThat(queryExecutor.executeQuery("SELECT * FROM " + tableName)) <add> assertThat(onPresto().executeQuery("SELECT * FROM " + tableName)) <ide> .containsOnly( <ide> row(null, null), <ide> row(-42, "abc"), <ide> row(9223372036854775807L, "abcdefghijklmnopqrstuvwxyz")); <del> queryExecutor.executeQuery("DROP TABLE " + tableName); <add> onPresto().executeQuery("DROP TABLE " + tableName); <ide> } <ide> <ide> @Test(groups = {ICEBERG, STORAGE_FORMATS}) <ide> @Flaky(issue = "https://github.com/prestosql/presto/issues/4864", match = "Failed to read footer of file") <ide> public void testCreateTableAsSelect() <ide> { <del> String tableName = "test_create_table_as_select_" + randomTableSuffix(); <del> QueryExecutor 
queryExecutor = onPresto(); <del> queryExecutor.executeQuery("use iceberg.iceberg"); <del> queryExecutor.executeQuery("" + <add> String tableName = "iceberg.iceberg.test_create_table_as_select_" + randomTableSuffix(); <add> onPresto().executeQuery("" + <ide> "CREATE TABLE " + tableName + " AS " + <ide> "SELECT * FROM (VALUES " + <ide> " (NULL, NULL), " + <ide> " (-42, 'abc'), " + <ide> " (9223372036854775807, 'abcdefghijklmnopqrstuvwxyz')" + <ide> ") t(a, b)"); <del> assertThat(queryExecutor.executeQuery("SELECT * FROM " + tableName)) <add> assertThat(onPresto().executeQuery("SELECT * FROM " + tableName)) <ide> .containsOnly( <ide> row(null, null), <ide> row(-42, "abc"), <ide> row(9223372036854775807L, "abcdefghijklmnopqrstuvwxyz")); <del> queryExecutor.executeQuery("DROP TABLE " + tableName); <add> onPresto().executeQuery("DROP TABLE " + tableName); <ide> } <ide> }
Java
apache-2.0
error: pathspec 'src/main/java/com/yin/myproject/demo/concurrent/base/SimplePriorities.java' did not match any file(s) known to git
67cda42e270688d20681e315c8887d61b9946039
1
SmallBadFish/demo,SmallBadFish/demo
package com.yin.myproject.demo.concurrent.base; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; public class SimplePriorities implements Runnable { private int countDown = 5; private volatile double d; private int priority; public SimplePriorities(int priority) { this.priority = priority; } public String toString() { return "name:" + Thread.currentThread().getName() + " priority: " + Thread.currentThread().getPriority() + ": " + countDown; } public void run() { Thread.currentThread().setPriority(priority); while (true) { for (int i = 1; i < 100000; i++) { d += (Math.PI + Math.E) / (double) i; if (i % 1000 == 0) { Thread.yield(); } } System.out.println(this); if (--countDown == 0) return; } } public static void main(String[] args) { ExecutorService exec = Executors.newCachedThreadPool(); for (int i = 0; i < 5; i++) { exec.execute(new SimplePriorities(Thread.MIN_PRIORITY)); } exec.execute(new SimplePriorities(Thread.MAX_PRIORITY)); exec.shutdown(); } }
src/main/java/com/yin/myproject/demo/concurrent/base/SimplePriorities.java
线程优先级Demo
src/main/java/com/yin/myproject/demo/concurrent/base/SimplePriorities.java
线程优先级Demo
<ide><path>rc/main/java/com/yin/myproject/demo/concurrent/base/SimplePriorities.java <add>package com.yin.myproject.demo.concurrent.base; <add> <add>import java.util.concurrent.ExecutorService; <add>import java.util.concurrent.Executors; <add> <add>public class SimplePriorities implements Runnable { <add> private int countDown = 5; <add> private volatile double d; <add> private int priority; <add> <add> public SimplePriorities(int priority) { <add> this.priority = priority; <add> } <add> <add> public String toString() { <add> return "name:" + Thread.currentThread().getName() + " priority: " + Thread.currentThread().getPriority() + ": " <add> + countDown; <add> } <add> <add> public void run() { <add> Thread.currentThread().setPriority(priority); <add> while (true) { <add> for (int i = 1; i < 100000; i++) { <add> d += (Math.PI + Math.E) / (double) i; <add> if (i % 1000 == 0) { <add> Thread.yield(); <add> } <add> } <add> System.out.println(this); <add> if (--countDown == 0) <add> return; <add> } <add> } <add> <add> public static void main(String[] args) { <add> ExecutorService exec = Executors.newCachedThreadPool(); <add> for (int i = 0; i < 5; i++) { <add> exec.execute(new SimplePriorities(Thread.MIN_PRIORITY)); <add> } <add> exec.execute(new SimplePriorities(Thread.MAX_PRIORITY)); <add> exec.shutdown(); <add> } <add>}
JavaScript
apache-2.0
5ccfecf6fada7430058caf2325ba2900b2276e9c
0
mwaylabs/uikit,mwaylabs/uikit,mwaylabs/uikit
angular.module('mwUI.UiComponents') .directive('mwButtonHelp', function (i18n, $compile) { return { restrict: 'A', scope: true, link: function (scope, elm) { var popup; var helpIcon = $compile(angular.element('<div mw-icon="mwUI.questionCircle">'))(scope) .addClass('help-icon hidden-sm hidden-xs'); elm.addClass('mw-button-help'); elm.prepend(helpIcon); var buildPopup = function () { popup = angular.element('<div>' + scope.helpText + '<ul></ul></div>').addClass('mw-button-help-popover popover'); angular.forEach(scope.hintsToShow, function (hint) { popup.find('ul').append('<li>' + hint.text + '</li>'); }); }; helpIcon.hover(function () { buildPopup(); var targetOffset = angular.element(this).offset(); angular.element('body').append(popup); popup.css('top', targetOffset.top - (popup.height() / 2) + 10 - angular.element(document).scrollTop()); popup.css('left', (targetOffset.left + 40)); }, function () { angular.element('body > .mw-button-help-popover').remove(); }); scope.$watch('hintsToShow', function (newVal) { if (newVal && newVal.length) { helpIcon.removeClass('hidden'); } else { helpIcon.addClass('hidden'); } }); scope.$on('$destroy', function () { if (popup) { popup.remove(); } }); }, controller: function ($scope) { $scope.registeredHints = []; $scope.hintsToShow = []; $scope.helpText = i18n.get('UiComponents.mwButtonHelp.isDisabledBecause'); $scope.$on('i18n:localeChanged', function () { $scope.helpText = i18n.get('common.buttonHelp'); }); var showHelp = function () { $scope.hintsToShow = []; angular.forEach($scope.registeredHints, function (registered) { if (registered.condition) { $scope.hintsToShow.push(registered); } }); }; //check if any condition changes this.register = function (registered) { $scope.$watch(function () { return registered.condition; }, showHelp); $scope.registeredHints.push(registered); }; } }; });
src/mw-ui-components/directives/mw_button_help.js
angular.module('mwUI.UiComponents') .directive('mwButtonHelp', function (i18n, $compile) { return { restrict: 'A', scope: true, link: function (scope, elm) { var popup; var helpIcon = $compile(angular.element('<div mw-icon="mwUI.question">'))(scope) .addClass('help-icon hidden-sm hidden-xs'); elm.addClass('mw-button-help'); elm.prepend(helpIcon); var buildPopup = function () { popup = angular.element('<div>' + scope.helpText + '<ul></ul></div>').addClass('mw-button-help-popover popover'); angular.forEach(scope.hintsToShow, function (hint) { popup.find('ul').append('<li>' + hint.text + '</li>'); }); }; helpIcon.hover(function () { buildPopup(); var targetOffset = angular.element(this).offset(); angular.element('body').append(popup); popup.css('top', targetOffset.top - (popup.height() / 2) + 10 - angular.element(document).scrollTop()); popup.css('left', (targetOffset.left + 40)); }, function () { angular.element('body > .mw-button-help-popover').remove(); }); scope.$watch('hintsToShow', function (newVal) { if (newVal && newVal.length) { helpIcon.removeClass('hidden'); } else { helpIcon.addClass('hidden'); } }); scope.$on('$destroy', function () { if (popup) { popup.remove(); } }); }, controller: function ($scope) { $scope.registeredHints = []; $scope.hintsToShow = []; $scope.helpText = i18n.get('UiComponents.mwButtonHelp.isDisabledBecause'); $scope.$on('i18n:localeChanged', function () { $scope.helpText = i18n.get('common.buttonHelp'); }); var showHelp = function () { $scope.hintsToShow = []; angular.forEach($scope.registeredHints, function (registered) { if (registered.condition) { $scope.hintsToShow.push(registered); } }); }; //check if any condition changes this.register = function (registered) { $scope.$watch(function () { return registered.condition; }, showHelp); $scope.registeredHints.push(registered); }; } }; });
use different icon because the other one looked bad
src/mw-ui-components/directives/mw_button_help.js
use different icon because the other one looked bad
<ide><path>rc/mw-ui-components/directives/mw_button_help.js <ide> link: function (scope, elm) { <ide> var popup; <ide> var helpIcon = <del> $compile(angular.element('<div mw-icon="mwUI.question">'))(scope) <add> $compile(angular.element('<div mw-icon="mwUI.questionCircle">'))(scope) <ide> .addClass('help-icon hidden-sm hidden-xs'); <ide> <ide> elm.addClass('mw-button-help');
Java
apache-2.0
78bd07158066dfc7ed86836a976c8ebeaa0947cb
0
aldaris/wicket,klopfdreh/wicket,selckin/wicket,aldaris/wicket,astrapi69/wicket,apache/wicket,selckin/wicket,zwsong/wicket,astrapi69/wicket,dashorst/wicket,freiheit-com/wicket,klopfdreh/wicket,bitstorm/wicket,freiheit-com/wicket,Servoy/wicket,mosoft521/wicket,mosoft521/wicket,martin-g/wicket-osgi,astrapi69/wicket,dashorst/wicket,AlienQueen/wicket,AlienQueen/wicket,apache/wicket,apache/wicket,aldaris/wicket,Servoy/wicket,martin-g/wicket-osgi,apache/wicket,aldaris/wicket,freiheit-com/wicket,apache/wicket,topicusonderwijs/wicket,mosoft521/wicket,mafulafunk/wicket,freiheit-com/wicket,dashorst/wicket,bitstorm/wicket,dashorst/wicket,bitstorm/wicket,astrapi69/wicket,zwsong/wicket,mosoft521/wicket,AlienQueen/wicket,selckin/wicket,AlienQueen/wicket,AlienQueen/wicket,Servoy/wicket,mafulafunk/wicket,topicusonderwijs/wicket,klopfdreh/wicket,aldaris/wicket,selckin/wicket,topicusonderwijs/wicket,Servoy/wicket,Servoy/wicket,dashorst/wicket,klopfdreh/wicket,bitstorm/wicket,klopfdreh/wicket,topicusonderwijs/wicket,mosoft521/wicket,freiheit-com/wicket,selckin/wicket,topicusonderwijs/wicket,mafulafunk/wicket,martin-g/wicket-osgi,zwsong/wicket,zwsong/wicket,bitstorm/wicket
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.protocol.http; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.apache.wicket.Application; import org.apache.wicket.Component; import org.apache.wicket.IClusterable; import org.apache.wicket.IPageMap; import org.apache.wicket.Page; import org.apache.wicket.PageMap; import org.apache.wicket.Request; import org.apache.wicket.session.pagemap.IPageMapEntry; import org.apache.wicket.util.collections.IntHashMap; import org.apache.wicket.version.IPageVersionManager; import org.apache.wicket.version.undo.Change; /** * FIXME document me! * * @author jcompagner */ public class SecondLevelCacheSessionStore extends HttpSessionStore { /** * This interface is used by the SecondLevelCacheSessionStore so that pages * can be stored to a persistent layer. Implementation should store the page * that it gets under the id and version number. So that every page version * can be reconstructed when asked for. * * @see FilePageStore as default implementation. */ public static interface IPageStore { /** * Destroy the store. 
*/ void destroy(); /** * Restores a page version from the persistent layer * * @param sessionId * @param pagemap * @param id * @param versionNumber * @param ajaxVersionNumber * @return The page */ Page getPage(String sessionId, String pagemap, int id, int versionNumber, int ajaxVersionNumber); /** * This method is called when the page is accessed. A IPageStore * implementation can block until a save of that page version is done. * So that a specific page version is always restore able. * * @param sessionId * @param page */ void pageAccessed(String sessionId, Page page); /** * Removes a page from the persistent layer. * * @param sessionId * The session of the page that must be removed * @param pagemap * The pagemap of the page that must be removed * @param id * The id of the page. */ void removePage(String sessionId, String pagemap, int id); /** * Stores the page to a persistent layer. The page should be stored * under the id and the version number. * * @param sessionId * @param page */ void storePage(String sessionId, Page page); /** * The pagestore should cleanup all the pages for that sessionid. * * @param sessionId */ void unbind(String sessionId); } /** * Page map implementation for this session store. */ private static final class SecondLevelCachePageMap extends PageMap { private static final long serialVersionUID = 1L; private Page lastPage = null; private List pageVersions = new ArrayList(); /** * Construct. * * @param name */ private SecondLevelCachePageMap(String name) { super(name); } /** * @see org.apache.wicket.PageMap#get(int, int) */ public Page get(int id, int versionNumber) { HashMap pageMaps = (HashMap)usedPages.get(); if (pageMaps == null) { pageMaps = new HashMap(); usedPages.set(pageMaps); } IntHashMap pages = (IntHashMap)pageMaps.get(getName()); if (pages == null) { pages = new IntHashMap(); pageMaps.put(getName(), pages); } // for now i only get by id. 
// does it really make any sense that there are multiply instances // of the // same page are alive in one session?? Page page = (Page)pages.get(id); if (page != null) return page; PageVersions pv = null; if (versionNumber == -1) { // when no version was specified, get the last touched page for // given page id for (int index = pageVersions.size() - 1; index >= 0; --index) { if (((PageVersions)pageVersions.get(index)).pageid == id) { pv = (PageVersions)pageVersions.get(index); versionNumber = pv.versionid; break; } } } String sessionId = getSession().getId(); if (lastPage != null && lastPage.getNumericId() == id) { page = lastPage.getVersion(versionNumber); if (page != null) { // ask the page store if it is ready saving the page. getStore().pageAccessed(sessionId, page); pages.put(id, page); return page; } } if (sessionId != null) { int ajaxVersionNumber = 0; if (pv == null) { int index = pageVersions.indexOf(new PageVersions(id, versionNumber, -1)); if (index != -1) { pv = (PageVersions)pageVersions.get(index); } } if (pv != null) { ajaxVersionNumber = pv.ajaxversionid; } lastPage = null; page = getStore().getPage(sessionId, getName(), id, versionNumber, ajaxVersionNumber); pages.put(id, page); return page; } return null; } /** * @see org.apache.wicket.PageMap#put(org.apache.wicket.Page) */ public void put(Page page) { if (!page.isPageStateless()) { String sessionId = getSession().getId(); if (sessionId != null) { getStore().storePage(sessionId, page); lastPage = page; dirty(); PageVersions pv = new PageVersions(page.getNumericId(), page .getCurrentVersionNumber(), page.getAjaxVersionNumber()); pageVersions.remove(pv); pageVersions.add(pv); if (pageVersions.size() > 100) { pageVersions.remove(0); } } } } /** * @see org.apache.wicket.PageMap#clear() */ public void clear() { super.clear(); String sessionId = getSession().getId(); if (sessionId != null) { getStore().removePage(sessionId, getName(), -1); } } /** * @see 
org.apache.wicket.PageMap#removeEntry(org.apache.wicket.session.pagemap.IPageMapEntry) */ public void removeEntry(IPageMapEntry entry) { String sessionId = getSession().getId(); if (sessionId != null) { getStore().removePage(sessionId, getName(), entry.getNumericId()); } } private IPageStore getStore() { return ((SecondLevelCacheSessionStore)Application.get().getSessionStore()).getStore(); } private static class PageVersions implements IClusterable { private static final long serialVersionUID = 1L; private final int pageid; private int versionid; private int ajaxversionid; PageVersions(int pageid, int versionid, int ajaxversionid) { this.pageid = pageid; this.versionid = versionid; this.ajaxversionid = ajaxversionid; } /** * @see java.lang.Object#equals(java.lang.Object) */ public boolean equals(Object obj) { if (obj instanceof PageVersions) { return ((PageVersions)obj).pageid == pageid && ((PageVersions)obj).versionid == versionid; } return false; } /** * @see java.lang.Object#hashCode() */ public int hashCode() { return pageid; } } } /** * version manager for this session store. */ private static final class SecondLevelCachePageVersionManager implements IPageVersionManager { private static final long serialVersionUID = 1L; private short currentVersionNumber; private short currentAjaxVersionNumber; private short lastAjaxVersionNumber; private Page page; private transient boolean versionStarted; /** * Construct. * * @param page */ public SecondLevelCachePageVersionManager(Page page) { this.page = page; } /** * @see org.apache.wicket.version.IPageVersionManager#beginVersion(boolean) */ public void beginVersion(boolean mergeVersion) { // this is an hack.. when object is read in. It must ignore the // first version bump. 
if (versionStarted) return; versionStarted = true; if (!mergeVersion) { currentVersionNumber++; lastAjaxVersionNumber = currentAjaxVersionNumber; currentAjaxVersionNumber = 0; } else { currentAjaxVersionNumber++; } } /** * @see org.apache.wicket.version.IPageVersionManager#componentAdded(org.apache.wicket.Component) */ public void componentAdded(Component component) { } /** * @see org.apache.wicket.version.IPageVersionManager#componentModelChanging(org.apache.wicket.Component) */ public void componentModelChanging(Component component) { } /** * @see org.apache.wicket.version.IPageVersionManager#componentRemoved(org.apache.wicket.Component) */ public void componentRemoved(Component component) { } /** * @see org.apache.wicket.version.IPageVersionManager#componentStateChanging(org.apache.wicket.version.undo.Change) */ public void componentStateChanging(Change change) { } /** * @see org.apache.wicket.version.IPageVersionManager#endVersion(boolean) */ public void endVersion(boolean mergeVersion) { versionStarted = false; String sessionId = page.getSession().getId(); if (sessionId != null) { IPageStore store = ((SecondLevelCacheSessionStore)Application.get() .getSessionStore()).getStore(); store.storePage(sessionId, page); } } /** * @see org.apache.wicket.version.IPageVersionManager#expireOldestVersion() */ public void expireOldestVersion() { } /** * @see org.apache.wicket.version.IPageVersionManager#getAjaxVersionNumber() */ public int getAjaxVersionNumber() { return currentAjaxVersionNumber; } /** * @see org.apache.wicket.version.IPageVersionManager#getCurrentVersionNumber() */ public int getCurrentVersionNumber() { return currentVersionNumber; } /** * @see org.apache.wicket.version.IPageVersionManager#getVersion(int) */ public Page getVersion(int versionNumber) { if (currentVersionNumber == versionNumber) { return page; } return null; } /** * @see org.apache.wicket.version.IPageVersionManager#getVersions() */ public int getVersions() { return 0; } /** * @see 
org.apache.wicket.version.IPageVersionManager#ignoreVersionMerge() */ public void ignoreVersionMerge() { currentVersionNumber++; lastAjaxVersionNumber = currentAjaxVersionNumber; currentAjaxVersionNumber = 0; } /** * @see org.apache.wicket.version.IPageVersionManager#rollbackPage(int) */ public Page rollbackPage(int numberOfVersions) { String sessionId = page.getSession().getId(); if (sessionId != null) { int versionNumber = currentVersionNumber; int ajaxNumber = currentAjaxVersionNumber; if (versionStarted) { versionNumber--; ajaxNumber--; } IPageStore store = ((SecondLevelCacheSessionStore)Application.get() .getSessionStore()).getStore(); // if the number of versions to rollback can be done inside the // current page version. if (ajaxNumber >= numberOfVersions) { return store.getPage(sessionId, page.getPageMapName(), page.getNumericId(), versionNumber, ajaxNumber - numberOfVersions); } else { // else go one page version down. versionNumber--; // then calculate the previous ajax version by looking at // the last ajax number of the previous version. ajaxNumber = lastAjaxVersionNumber - (numberOfVersions - ajaxNumber); if (ajaxNumber < 0) { // currently it is not supported to jump over 2 // pages.... log .error("trying to rollback to many versions, jumping over 2 page versions is not supported yet."); return null; } return store.getPage(sessionId, page.getPageMapName(), page.getNumericId(), versionNumber, ajaxNumber); } } return null; } private void readObject(java.io.ObjectInputStream s) throws IOException, ClassNotFoundException { s.defaultReadObject(); // this is an hack.. when object is read in. It must ignore the // first version bump. // (matej_k) for now, I'm commenting it out. It causes serious // trouble with back // button, where new versions are not created as they should be // johan promised to look at it soon // versionStarted = true; } } static final ThreadLocal usedPages = new ThreadLocal(); private final IPageStore pageStore; /** * Construct. 
* * @param application * The application for this store * * @param pageStore * Page store for keeping page versions */ public SecondLevelCacheSessionStore(Application application, final IPageStore pageStore) { super(application); this.pageStore = pageStore; // turn automatic multi window support off by default, as we don't // really // need to be afraid to run out of history with this implementation. // note that the session store is created before Application#init is // called, so if users set this setting explicitly, it'll be overridden // (and that's exactly what we want: provide a better default, but not // forcing people to do away with this feature). Application.get().getPageSettings().setAutomaticMultiWindowSupport(false); } /** * @see org.apache.wicket.protocol.http.HttpSessionStore#createPageMap(java.lang.String, * org.apache.wicket.Session) */ public IPageMap createPageMap(String name) { return new SecondLevelCachePageMap(name); } /** * @see org.apache.wicket.protocol.http.AbstractHttpSessionStore#onEndRequest(org.apache.wicket.Request) */ public void onEndRequest(Request request) { super.onEndRequest(request); usedPages.set(null); } /** * @see org.apache.wicket.protocol.http.AbstractHttpSessionStore#destroy() */ public void destroy() { super.destroy(); getStore().destroy(); } /** * @return The store to use */ public IPageStore getStore() { return pageStore; } /** * @see org.apache.wicket.protocol.http.HttpSessionStore#newVersionManager(org.apache.wicket.Page) */ public IPageVersionManager newVersionManager(Page page) { return new SecondLevelCachePageVersionManager(page); } /** * @see org.apache.wicket.session.ISessionStore#setAttribute(org.apache.wicket.Request, * java.lang.String, java.lang.Object) */ public void setAttribute(Request request, String name, Object value) { // ignore all pages, they are stored through the pagemap if (!(value instanceof Page)) { super.setAttribute(request, name, value); } } /** * @see 
org.apache.wicket.protocol.http.AbstractHttpSessionStore#onUnbind(java.lang.String) */ protected void onUnbind(String sessionId) { getStore().unbind(sessionId); } }
jdk-1.4/wicket/src/main/java/org/apache/wicket/protocol/http/SecondLevelCacheSessionStore.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.protocol.http; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.apache.wicket.Application; import org.apache.wicket.Component; import org.apache.wicket.IClusterable; import org.apache.wicket.IPageMap; import org.apache.wicket.Page; import org.apache.wicket.PageMap; import org.apache.wicket.Request; import org.apache.wicket.session.pagemap.IPageMapEntry; import org.apache.wicket.util.collections.IntHashMap; import org.apache.wicket.version.IPageVersionManager; import org.apache.wicket.version.undo.Change; /** * FIXME document me! * * @author jcompagner */ public class SecondLevelCacheSessionStore extends HttpSessionStore { /** * This interface is used by the SecondLevelCacheSessionStore so that pages * can be stored to a persistent layer. Implementation should store the page * that it gets under the id and version number. So that every page version * can be reconstructed when asked for. * * @see FilePageStore as default implementation. 
*/ public static interface IPageStore { /** * */ void destroy(); /** * Restores a page version from the persistent layer * * @param sessionId * @param pagemap * @param id * @param versionNumber * @param ajaxVersionNumber * @return The page */ Page getPage(String sessionId, String pagemap, int id, int versionNumber, int ajaxVersionNumber); /** * This method is called when the page is accessed. A IPageStore * implementation can block until a save of that page version is done. So * that a specific page version is always restore able. * * @param sessionId * @param page */ void pageAccessed(String sessionId, Page page); /** * Removes a page from the persistent layer. * * @param sessionId The session of the page that must be removed * @param pagemap The pagemap of the page that must be removed * @param id The id of the page. */ void removePage(String sessionId, String pagemap, int id); /** * Stores the page to a persistent layer. The page should be stored * under the id and the version number. * * @param sessionId * @param page */ void storePage(String sessionId, Page page); /** * The pagestore should cleanup all the pages for that sessionid. * * @param sessionId */ void unbind(String sessionId); } private static final class SecondLevelCachePageMap extends PageMap { private static final long serialVersionUID = 1L; private Page lastPage = null; private List pageVersions = new ArrayList(); /** * Construct. * * @param name */ private SecondLevelCachePageMap(String name) { super(name); } public Page get(int id, int versionNumber) { HashMap pageMaps = (HashMap)usedPages.get(); if (pageMaps == null) { pageMaps = new HashMap(); usedPages.set(pageMaps); } IntHashMap pages = (IntHashMap)pageMaps.get(getName()); if (pages == null) { pages = new IntHashMap(); pageMaps.put(getName(), pages); } // for now i only get by id. // does it really make any sense that there are multiply instances of the // same page are alive in one session?? 
Page page = (Page)pages.get(id); if (page != null) return page; PageVersions pv = null; if (versionNumber == -1) { // when no version was specified, get the last touched page for given page id for (int index = pageVersions.size() - 1; index >= 0; --index) { if (((PageVersions)pageVersions.get(index)).pageid == id) { pv = (PageVersions)pageVersions.get(index); versionNumber = pv.versionid; break; } } } String sessionId = getSession().getId(); if (lastPage != null && lastPage.getNumericId() == id) { page = lastPage.getVersion(versionNumber); if (page != null) { // ask the page store if it is ready saving the page. getStore().pageAccessed(sessionId, page); pages.put(id,page); return page; } } if (sessionId != null) { int ajaxVersionNumber = 0; if (pv == null) { int index = pageVersions.indexOf(new PageVersions(id, versionNumber,-1)); if (index != -1) { pv = (PageVersions)pageVersions.get(index); } } if (pv != null) { ajaxVersionNumber = pv.ajaxversionid; } lastPage = null; page = getStore().getPage(sessionId, getName(), id, versionNumber, ajaxVersionNumber); pages.put(id,page); return page; } return null; } public void put(Page page) { if (!page.isPageStateless()) { String sessionId = getSession().getId(); if (sessionId != null) { getStore().storePage(sessionId, page); lastPage = page; dirty(); PageVersions pv = new PageVersions(page.getNumericId(),page.getCurrentVersionNumber(),page.getAjaxVersionNumber()); pageVersions.remove(pv); pageVersions.add(pv); if (pageVersions.size() > 100) { pageVersions.remove(0); } } } } /** * @see org.apache.wicket.PageMap#clear() */ public void clear() { super.clear(); String sessionId = getSession().getId(); if (sessionId != null) { getStore().removePage(sessionId, getName(),-1); } } public void removeEntry(IPageMapEntry entry) { String sessionId = getSession().getId(); if (sessionId != null) { getStore().removePage(sessionId, getName(), entry.getNumericId()); } } private IPageStore getStore() { return 
((SecondLevelCacheSessionStore)Application.get().getSessionStore()).getStore(); } private static class PageVersions implements IClusterable { private static final long serialVersionUID = 1L; private final int pageid; private int versionid; private int ajaxversionid; PageVersions(int pageid, int versionid, int ajaxversionid) { this.pageid = pageid; this.versionid = versionid; this.ajaxversionid = ajaxversionid; } /** * @see java.lang.Object#equals(java.lang.Object) */ public boolean equals(Object obj) { if (obj instanceof PageVersions) { return ((PageVersions)obj).pageid == pageid && ((PageVersions)obj).versionid == versionid; } return false; } /** * @see java.lang.Object#hashCode() */ public int hashCode() { return pageid; } } } private static final class SecondLevelCachePageVersionManager implements IPageVersionManager { private static final long serialVersionUID = 1L; private short currentVersionNumber; private short currentAjaxVersionNumber; private short lastAjaxVersionNumber; private Page page; private transient boolean versionStarted; /** * Construct. * * @param page */ public SecondLevelCachePageVersionManager(Page page) { this.page = page; } /** * @see org.apache.wicket.version.IPageVersionManager#beginVersion(boolean) */ public void beginVersion(boolean mergeVersion) { // this is an hack.. when object is read in. It must ignore the // first version bump. 
if (versionStarted) return; versionStarted = true; if (!mergeVersion) { currentVersionNumber++; lastAjaxVersionNumber = currentAjaxVersionNumber; currentAjaxVersionNumber = 0; } else { currentAjaxVersionNumber++; } } /** * @see org.apache.wicket.version.IPageVersionManager#componentAdded(org.apache.wicket.Component) */ public void componentAdded(Component component) { } /** * @see org.apache.wicket.version.IPageVersionManager#componentModelChanging(org.apache.wicket.Component) */ public void componentModelChanging(Component component) { } /** * @see org.apache.wicket.version.IPageVersionManager#componentRemoved(org.apache.wicket.Component) */ public void componentRemoved(Component component) { } /** * @see org.apache.wicket.version.IPageVersionManager#componentStateChanging(org.apache.wicket.version.undo.Change) */ public void componentStateChanging(Change change) { } /** * @see org.apache.wicket.version.IPageVersionManager#endVersion(boolean) */ public void endVersion(boolean mergeVersion) { versionStarted = false; String sessionId = page.getSession().getId(); if (sessionId != null) { IPageStore store = ((SecondLevelCacheSessionStore)Application.get() .getSessionStore()).getStore(); store.storePage(sessionId, page); } } /** * @see org.apache.wicket.version.IPageVersionManager#expireOldestVersion() */ public void expireOldestVersion() { } /** * @see org.apache.wicket.version.IPageVersionManager#getAjaxVersionNumber() */ public int getAjaxVersionNumber() { return currentAjaxVersionNumber; } /** * @see org.apache.wicket.version.IPageVersionManager#getCurrentVersionNumber() */ public int getCurrentVersionNumber() { return currentVersionNumber; } /** * @see org.apache.wicket.version.IPageVersionManager#getVersion(int) */ public Page getVersion(int versionNumber) { if (currentVersionNumber == versionNumber) { return page; } return null; } /** * @see org.apache.wicket.version.IPageVersionManager#getVersions() */ public int getVersions() { return 0; } /** * @see 
org.apache.wicket.version.IPageVersionManager#ignoreVersionMerge() */ public void ignoreVersionMerge() { currentVersionNumber++; lastAjaxVersionNumber = currentAjaxVersionNumber; currentAjaxVersionNumber = 0; } /** * @see org.apache.wicket.version.IPageVersionManager#rollbackPage(int) */ public Page rollbackPage(int numberOfVersions) { String sessionId = page.getSession().getId(); if (sessionId != null) { int versionNumber = currentVersionNumber; int ajaxNumber = currentAjaxVersionNumber; if (versionStarted) { versionNumber--; ajaxNumber--; } IPageStore store = ((SecondLevelCacheSessionStore)Application.get() .getSessionStore()).getStore(); // if the number of versions to rollback can be done inside the // current page version. if (ajaxNumber >= numberOfVersions) { return store.getPage(sessionId, page.getPageMapName(), page .getNumericId(), versionNumber, ajaxNumber - numberOfVersions); } else { // else go one page version down. versionNumber--; // then calculate the previous ajax version by looking at // the last ajax number of the previous version. ajaxNumber = lastAjaxVersionNumber - (numberOfVersions - ajaxNumber); if (ajaxNumber < 0) { // currently it is not supported to jump over 2 // pages.... log.error("trying to rollback to many versions, jumping over 2 page versions is not supported yet."); return null; } return store.getPage(sessionId, page.getPageMapName(), page .getNumericId(), versionNumber, ajaxNumber); } } return null; } private void readObject(java.io.ObjectInputStream s) throws IOException, ClassNotFoundException { s.defaultReadObject(); // this is an hack.. when object is read in. It must ignore the // first version bump. // (matej_k) for now, I'm commenting it out. It causes serious // trouble with back // button, where new versions are not created as they should be // johan promised to look at it soon // versionStarted = true; } } static final ThreadLocal usedPages = new ThreadLocal(); private IPageStore pageStore; /** * Construct. 
* * @param application * The application for this store * * @param pageStore * Page store for keeping page versions */ public SecondLevelCacheSessionStore(Application application, final IPageStore pageStore) { super(application); this.pageStore = pageStore; // turn automatic multi window support off by default, as we don't really // need to be afraid to run out of history with this implementation. // note that the session store is created before Application#init is // called, so if users set this setting explicitly, it'll be overridden // (and that's exactly what we want: provide a better default, but not // forcing people to do away with this feature). Application.get().getPageSettings().setAutomaticMultiWindowSupport(false); } /** * @see org.apache.wicket.protocol.http.HttpSessionStore#createPageMap(java.lang.String, * org.apache.wicket.Session) */ public IPageMap createPageMap(String name) { return new SecondLevelCachePageMap(name); } /** * @see org.apache.wicket.protocol.http.AbstractHttpSessionStore#onEndRequest(org.apache.wicket.Request) */ public void onEndRequest(Request request) { super.onEndRequest(request); usedPages.set(null); } /** * @see org.apache.wicket.protocol.http.AbstractHttpSessionStore#destroy() */ public void destroy() { super.destroy(); getStore().destroy(); } /** * @return The store to use */ public IPageStore getStore() { return pageStore; } /** * @see org.apache.wicket.protocol.http.HttpSessionStore#newVersionManager(org.apache.wicket.Page) */ public IPageVersionManager newVersionManager(Page page) { return new SecondLevelCachePageVersionManager(page); } /** * @see org.apache.wicket.session.ISessionStore#setAttribute(org.apache.wicket.Request, * java.lang.String, java.lang.Object) */ public void setAttribute(Request request, String name, Object value) { // ignore all pages, they are stored through the pagemap if (!(value instanceof Page)) { super.setAttribute(request, name, value); } } /** * @see 
org.apache.wicket.protocol.http.AbstractHttpSessionStore#onUnbind(java.lang.String) */ protected void onUnbind(String sessionId) { getStore().unbind(sessionId); } }
docs, formatting git-svn-id: ac804e38dcddf5e42ac850d29d9218b7df6087b7@551394 13f79535-47bb-0310-9956-ffa450edef68
jdk-1.4/wicket/src/main/java/org/apache/wicket/protocol/http/SecondLevelCacheSessionStore.java
docs, formatting
<ide><path>dk-1.4/wicket/src/main/java/org/apache/wicket/protocol/http/SecondLevelCacheSessionStore.java <ide> { <ide> <ide> /** <del> * <add> * Destroy the store. <ide> */ <ide> void destroy(); <ide> <ide> <ide> /** <ide> * This method is called when the page is accessed. A IPageStore <del> * implementation can block until a save of that page version is done. So <del> * that a specific page version is always restore able. <add> * implementation can block until a save of that page version is done. <add> * So that a specific page version is always restore able. <ide> * <ide> * @param sessionId <ide> * @param page <ide> /** <ide> * Removes a page from the persistent layer. <ide> * <del> * @param sessionId The session of the page that must be removed <del> * @param pagemap The pagemap of the page that must be removed <del> * @param id The id of the page. <add> * @param sessionId <add> * The session of the page that must be removed <add> * @param pagemap <add> * The pagemap of the page that must be removed <add> * @param id <add> * The id of the page. <ide> */ <ide> void removePage(String sessionId, String pagemap, int id); <ide> <ide> * @param sessionId <ide> */ <ide> void unbind(String sessionId); <del> <del> } <del> <add> } <add> <add> /** <add> * Page map implementation for this session store. <add> */ <ide> private static final class SecondLevelCachePageMap extends PageMap <ide> { <ide> private static final long serialVersionUID = 1L; <ide> <ide> private Page lastPage = null; <del> <add> <ide> private List pageVersions = new ArrayList(); <del> <add> <ide> <ide> /** <ide> * Construct. <ide> super(name); <ide> } <ide> <add> /** <add> * @see org.apache.wicket.PageMap#get(int, int) <add> */ <ide> public Page get(int id, int versionNumber) <ide> { <ide> HashMap pageMaps = (HashMap)usedPages.get(); <ide> pages = new IntHashMap(); <ide> pageMaps.put(getName(), pages); <ide> } <del> <add> <ide> // for now i only get by id. 
<del> // does it really make any sense that there are multiply instances of the <add> // does it really make any sense that there are multiply instances <add> // of the <ide> // same page are alive in one session?? <ide> Page page = (Page)pages.get(id); <del> if (page != null) return page; <del> <add> if (page != null) <add> return page; <add> <ide> PageVersions pv = null; <ide> if (versionNumber == -1) <ide> { <del> // when no version was specified, get the last touched page for given page id <del> for (int index = pageVersions.size() - 1; index >= 0; --index) <del> { <del> if (((PageVersions)pageVersions.get(index)).pageid == id) <add> // when no version was specified, get the last touched page for <add> // given page id <add> for (int index = pageVersions.size() - 1; index >= 0; --index) <add> { <add> if (((PageVersions)pageVersions.get(index)).pageid == id) <ide> { <ide> pv = (PageVersions)pageVersions.get(index); <ide> versionNumber = pv.versionid; <ide> { <ide> // ask the page store if it is ready saving the page. 
<ide> getStore().pageAccessed(sessionId, page); <del> pages.put(id,page); <add> pages.put(id, page); <ide> return page; <ide> } <ide> } <ide> int ajaxVersionNumber = 0; <ide> if (pv == null) <ide> { <del> int index = pageVersions.indexOf(new PageVersions(id, versionNumber,-1)); <add> int index = pageVersions.indexOf(new PageVersions(id, versionNumber, -1)); <ide> if (index != -1) <ide> { <ide> pv = (PageVersions)pageVersions.get(index); <ide> ajaxVersionNumber = pv.ajaxversionid; <ide> } <ide> lastPage = null; <del> page = getStore().getPage(sessionId, getName(), id, versionNumber, ajaxVersionNumber); <del> pages.put(id,page); <add> page = getStore().getPage(sessionId, getName(), id, versionNumber, <add> ajaxVersionNumber); <add> pages.put(id, page); <ide> return page; <del> <add> <ide> } <ide> return null; <ide> } <ide> <add> /** <add> * @see org.apache.wicket.PageMap#put(org.apache.wicket.Page) <add> */ <ide> public void put(Page page) <ide> { <ide> if (!page.isPageStateless()) <ide> getStore().storePage(sessionId, page); <ide> lastPage = page; <ide> dirty(); <del> <del> PageVersions pv = new PageVersions(page.getNumericId(),page.getCurrentVersionNumber(),page.getAjaxVersionNumber()); <add> <add> PageVersions pv = new PageVersions(page.getNumericId(), page <add> .getCurrentVersionNumber(), page.getAjaxVersionNumber()); <ide> pageVersions.remove(pv); <ide> pageVersions.add(pv); <ide> if (pageVersions.size() > 100) <ide> } <ide> } <ide> } <del> <add> <ide> /** <ide> * @see org.apache.wicket.PageMap#clear() <ide> */ <ide> String sessionId = getSession().getId(); <ide> if (sessionId != null) <ide> { <del> getStore().removePage(sessionId, getName(),-1); <del> } <del> } <del> <add> getStore().removePage(sessionId, getName(), -1); <add> } <add> } <add> <add> /** <add> * @see org.apache.wicket.PageMap#removeEntry(org.apache.wicket.session.pagemap.IPageMapEntry) <add> */ <ide> public void removeEntry(IPageMapEntry entry) <ide> { <ide> String sessionId = 
getSession().getId(); <ide> { <ide> return ((SecondLevelCacheSessionStore)Application.get().getSessionStore()).getStore(); <ide> } <del> <del> private static class PageVersions implements IClusterable <add> <add> private static class PageVersions implements IClusterable <ide> { <ide> private static final long serialVersionUID = 1L; <del> <add> <ide> private final int pageid; <ide> private int versionid; <ide> private int ajaxversionid; <del> <add> <ide> PageVersions(int pageid, int versionid, int ajaxversionid) <ide> { <ide> this.pageid = pageid; <ide> this.versionid = versionid; <ide> this.ajaxversionid = ajaxversionid; <ide> } <del> <add> <ide> /** <ide> * @see java.lang.Object#equals(java.lang.Object) <ide> */ <ide> { <ide> if (obj instanceof PageVersions) <ide> { <del> return ((PageVersions)obj).pageid == pageid && <del> ((PageVersions)obj).versionid == versionid; <add> return ((PageVersions)obj).pageid == pageid <add> && ((PageVersions)obj).versionid == versionid; <ide> } <ide> return false; <ide> } <del> <add> <ide> /** <ide> * @see java.lang.Object#hashCode() <ide> */ <ide> } <ide> } <ide> <add> /** <add> * version manager for this session store. <add> */ <ide> private static final class SecondLevelCachePageVersionManager implements IPageVersionManager <ide> { <ide> private static final long serialVersionUID = 1L; <ide> // current page version. <ide> if (ajaxNumber >= numberOfVersions) <ide> { <del> return store.getPage(sessionId, page.getPageMapName(), page <del> .getNumericId(), versionNumber, ajaxNumber - numberOfVersions); <add> return store.getPage(sessionId, page.getPageMapName(), page.getNumericId(), <add> versionNumber, ajaxNumber - numberOfVersions); <ide> } <ide> else <ide> { <ide> { <ide> // currently it is not supported to jump over 2 <ide> // pages.... 
<del> log.error("trying to rollback to many versions, jumping over 2 page versions is not supported yet."); <add> log <add> .error("trying to rollback to many versions, jumping over 2 page versions is not supported yet."); <ide> return null; <ide> } <del> return store.getPage(sessionId, page.getPageMapName(), page <del> .getNumericId(), versionNumber, ajaxNumber); <add> return store.getPage(sessionId, page.getPageMapName(), page.getNumericId(), <add> versionNumber, ajaxNumber); <ide> } <ide> } <ide> <ide> } <ide> <ide> } <del> <add> <ide> static final ThreadLocal usedPages = new ThreadLocal(); <ide> <del> private IPageStore pageStore; <add> private final IPageStore pageStore; <ide> <ide> /** <ide> * Construct. <ide> <ide> this.pageStore = pageStore; <ide> <del> // turn automatic multi window support off by default, as we don't really <add> // turn automatic multi window support off by default, as we don't <add> // really <ide> // need to be afraid to run out of history with this implementation. <ide> // note that the session store is created before Application#init is <ide> // called, so if users set this setting explicitly, it'll be overridden <ide> super.onEndRequest(request); <ide> usedPages.set(null); <ide> } <add> <ide> /** <ide> * @see org.apache.wicket.protocol.http.AbstractHttpSessionStore#destroy() <ide> */
Java
apache-2.0
1db042ee33601bad928b2f3e7d65f3cb9fa80d43
0
jeorme/OG-Platform,nssales/OG-Platform,nssales/OG-Platform,ChinaQuants/OG-Platform,McLeodMoores/starling,codeaudit/OG-Platform,jerome79/OG-Platform,jeorme/OG-Platform,nssales/OG-Platform,jerome79/OG-Platform,McLeodMoores/starling,nssales/OG-Platform,ChinaQuants/OG-Platform,jeorme/OG-Platform,jeorme/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,McLeodMoores/starling,codeaudit/OG-Platform,ChinaQuants/OG-Platform,jerome79/OG-Platform,DevStreet/FinanceAnalytics,DevStreet/FinanceAnalytics,McLeodMoores/starling,ChinaQuants/OG-Platform,codeaudit/OG-Platform,jerome79/OG-Platform
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.util.result; import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.assertFalse; import static org.testng.AssertJUnit.assertTrue; import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.testng.annotations.Test; import com.opengamma.util.test.TestGroup; @Test(groups = TestGroup.UNIT) public class ResultTest { @Test public void anyFailures() { Result<String> success1 = Result.success("success 1"); Result<String> success2 = Result.success("success 1"); Result<Object> failure1 = Result.failure(FailureStatus.MISSING_DATA, "failure 1"); Result<Object> failure2 = Result.failure(FailureStatus.ERROR, "failure 2"); assertTrue(Result.anyFailures(failure1, failure2)); assertTrue(Result.anyFailures(failure1, success1)); assertFalse(Result.anyFailures(success1, success2)); } @SuppressWarnings("unchecked") @Test public void propagateFailures() { Result<String> success1 = Result.success("success 1"); Result<String> success2 = Result.success("success 1"); Result<Object> failure1 = Result.failure(FailureStatus.MISSING_DATA, "failure 1"); Result<Object> failure2 = Result.failure(FailureStatus.ERROR, "failure 2"); Result<Object> composite1 = Result.failure(success1, success2, failure1, failure2); Collection<Failure> failures = composite1.getFailures(); Set<Failure> expected = new HashSet<>(); expected.addAll(failure1.getFailures()); expected.addAll(failure2.getFailures()); assertEquals(expected, failures); } @Test(expectedExceptions = IllegalArgumentException.class) public void propagateSuccesses() { Result<String> success1 = Result.success("success 1"); Result<String> success2 = Result.success("success 1"); Result.failure(success1, success2); } }
projects/OG-Util/src/test/java/com/opengamma/util/result/ResultTest.java
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.util.result; import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.assertFalse; import static org.testng.AssertJUnit.assertTrue; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.testng.annotations.Test; import com.opengamma.util.test.TestGroup; @Test(groups = TestGroup.UNIT) public class ResultTest { @Test public void anyFailures() { Result<String> success1 = Result.success("success 1"); Result<String> success2 = Result.success("success 1"); Result<Object> failure1 = Result.failure(FailureStatus.MISSING_DATA, "failure 1"); Result<Object> failure2 = Result.failure(FailureStatus.ERROR, "failure 2"); assertTrue(Result.anyFailures(failure1, failure2)); assertTrue(Result.anyFailures(failure1, success1)); assertFalse(Result.anyFailures(success1, success2)); } @SuppressWarnings("unchecked") @Test public void propagateFailures() { Result<String> success1 = Result.success("success 1"); Result<String> success2 = Result.success("success 1"); Result<Object> failure1 = Result.failure(FailureStatus.MISSING_DATA, "failure 1"); Result<Object> failure2 = Result.failure(FailureStatus.ERROR, "failure 2"); Result<Object> composite1 = Result.failure(success1, success2, failure1, failure2); Collection<Failure> failures = composite1.getFailures(); List<Failure> expected = new ArrayList<>(); expected.addAll(failure1.getFailures()); expected.addAll(failure2.getFailures()); assertEquals(expected, failures); } @Test(expectedExceptions = IllegalArgumentException.class) public void propagateSuccesses() { Result<String> success1 = Result.success("success 1"); Result<String> success2 = Result.success("success 1"); Result.failure(success1, success2); } }
Fix test
projects/OG-Util/src/test/java/com/opengamma/util/result/ResultTest.java
Fix test
<ide><path>rojects/OG-Util/src/test/java/com/opengamma/util/result/ResultTest.java <ide> import static org.testng.AssertJUnit.assertFalse; <ide> import static org.testng.AssertJUnit.assertTrue; <ide> <del>import java.util.ArrayList; <ide> import java.util.Collection; <del>import java.util.List; <add>import java.util.HashSet; <add>import java.util.Set; <ide> <ide> import org.testng.annotations.Test; <ide> <ide> Result<Object> failure2 = Result.failure(FailureStatus.ERROR, "failure 2"); <ide> Result<Object> composite1 = Result.failure(success1, success2, failure1, failure2); <ide> Collection<Failure> failures = composite1.getFailures(); <del> List<Failure> expected = new ArrayList<>(); <add> Set<Failure> expected = new HashSet<>(); <ide> expected.addAll(failure1.getFailures()); <ide> expected.addAll(failure2.getFailures()); <ide> assertEquals(expected, failures);
Java
apache-2.0
93035e5c9131c008f864cc4cc1f607d60775f335
0
mariosotil/couchbase-lite-java-core,mariosotil/couchbase-lite-java-core,netsense-sas/couchbase-lite-java-core,couchbase/couchbase-lite-java-core,4u7/couchbase-lite-java-core,mariosotil/couchbase-lite-java-core,gotmyjobs/couchbase-lite-java-core,Spotme/couchbase-lite-java-core
package com.couchbase.lite.replicator; import com.couchbase.lite.CouchbaseLiteException; import com.couchbase.lite.Database; import com.couchbase.lite.Manager; import com.couchbase.lite.Misc; import com.couchbase.lite.RevisionList; import com.couchbase.lite.Status; import com.couchbase.lite.internal.InterfaceAudience; import com.couchbase.lite.internal.RevisionInternal; import com.couchbase.lite.storage.SQLException; import com.couchbase.lite.support.BatchProcessor; import com.couchbase.lite.support.Batcher; import com.couchbase.lite.support.HttpClientFactory; import com.couchbase.lite.support.RemoteRequestCompletionBlock; import com.couchbase.lite.support.SequenceMap; import com.couchbase.lite.util.CollectionUtils; import com.couchbase.lite.util.Log; import com.couchbase.lite.util.Utils; import org.apache.http.HttpResponse; import org.apache.http.client.HttpClient; import org.apache.http.client.HttpResponseException; import java.net.URL; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; /** * Pull Replication * * @exclude */ @InterfaceAudience.Private public class PullerInternal extends ReplicationInternal implements ChangeTrackerClient{ private static final int MAX_OPEN_HTTP_CONNECTIONS = 16; // Maximum number of revs to fetch in a single bulk request public static final int MAX_REVS_TO_GET_IN_BULK = 50; // Maximum number of revision IDs to pass in an "?atts_since=" query param public static final int MAX_NUMBER_OF_ATTS_SINCE = 50; public static int CHANGE_TRACKER_RESTART_DELAY_MS = 10 * 1000; private ChangeTracker changeTracker; protected SequenceMap pendingSequences; protected Boolean canBulkGet; // Does the server support 
_bulk_get requests? protected List<RevisionInternal> revsToPull; protected List<RevisionInternal> bulkRevsToPull; protected List<RevisionInternal> deletedRevsToPull; protected int httpConnectionCount; protected Batcher<RevisionInternal> downloadsToInsert; public PullerInternal(Database db, URL remote, HttpClientFactory clientFactory, ScheduledExecutorService workExecutor, Replication.Lifecycle lifecycle, Replication parentReplication) { super(db, remote, clientFactory, workExecutor, lifecycle, parentReplication); } /** * Actual work of starting the replication process. */ protected void beginReplicating() { Log.d(Log.TAG_SYNC, "startReplicating()"); initPendingSequences(); initDownloadsToInsert(); startChangeTracker(); // start replicator .. } private void initDownloadsToInsert() { if (downloadsToInsert == null) { int capacity = 200; int delay = 1000; downloadsToInsert = new Batcher<RevisionInternal>(workExecutor, capacity, delay, new BatchProcessor<RevisionInternal>() { @Override public void process(List<RevisionInternal> inbox) { insertDownloads(inbox); } }); } } public boolean isPull() { return true; } /* package */ void maybeCreateRemoteDB() { // puller never needs to do this } protected void startChangeTracker() { ChangeTracker.ChangeTrackerMode changeTrackerMode; // it always starts out as OneShot, but if its a continuous replication // it will switch to longpoll later. 
changeTrackerMode = ChangeTracker.ChangeTrackerMode.OneShot; Log.w(Log.TAG_SYNC, "%s: starting ChangeTracker with since=%s mode=%s", this, lastSequence, changeTrackerMode); changeTracker = new ChangeTracker(remote, changeTrackerMode, true, lastSequence, this); changeTracker.setAuthenticator(getAuthenticator()); Log.w(Log.TAG_SYNC, "%s: started ChangeTracker %s", this, changeTracker); if (filterName != null) { changeTracker.setFilterName(filterName); if (filterParams != null) { changeTracker.setFilterParams(filterParams); } } changeTracker.setDocIDs(documentIDs); changeTracker.setRequestHeaders(requestHeaders); changeTracker.setContinuous(lifecycle == Replication.Lifecycle.CONTINUOUS); changeTracker.setUsePOST(serverIsSyncGatewayVersion("0.93")); changeTracker.start(); } /** * Process a bunch of remote revisions from the _changes feed at once */ @Override @InterfaceAudience.Private protected void processInbox(RevisionList inbox) { Log.d(Log.TAG_SYNC, "processInbox called"); if (canBulkGet == null) { canBulkGet = serverIsSyncGatewayVersion("0.81"); } // Ask the local database which of the revs are not known to it: String lastInboxSequence = ((PulledRevision) inbox.get(inbox.size() - 1)).getRemoteSequenceID(); int numRevisionsRemoved = 0; try { // findMissingRevisions is the local equivalent of _revs_diff. it looks at the // array of revisions in ‘inbox’ and removes the ones that already exist. So whatever’s left in ‘inbox’ // afterwards are the revisions that need to be downloaded. 
numRevisionsRemoved = db.findMissingRevisions(inbox); } catch (SQLException e) { Log.e(Log.TAG_SYNC, String.format("%s failed to look up local revs", this), e); inbox = null; } //introducing this to java version since inbox may now be null everywhere int inboxCount = 0; if (inbox != null) { inboxCount = inbox.size(); } if (numRevisionsRemoved > 0) { Log.v(Log.TAG_SYNC, "%s: processInbox() setting changesCount to: %s", this, getChangesCount().get() - numRevisionsRemoved); // May decrease the changesCount, to account for the revisions we just found out we don’t need to get. addToChangesCount(-1 * numRevisionsRemoved); } if (inboxCount == 0) { // Nothing to do. Just bump the lastSequence. Log.w(Log.TAG_SYNC, "%s no new remote revisions to fetch. add lastInboxSequence (%s) to pendingSequences (%s)", this, lastInboxSequence, pendingSequences); long seq = pendingSequences.addValue(lastInboxSequence); pendingSequences.removeSequence(seq); setLastSequence(pendingSequences.getCheckpointedValue()); return; } Log.v(Log.TAG_SYNC, "%s: fetching %s remote revisions...", this, inboxCount); // Dump the revs into the queue of revs to pull from the remote db: int numBulked = 0; for (int i = 0; i < inbox.size(); i++) { PulledRevision rev = (PulledRevision) inbox.get(i); //TODO: add support for rev isConflicted if (canBulkGet || (rev.getGeneration() == 1 && !rev.isDeleted())) { // &&!rev.isConflicted) //optimistically pull 1st-gen revs in bulk if (bulkRevsToPull == null) bulkRevsToPull = new ArrayList<RevisionInternal>(100); bulkRevsToPull.add(rev); ++numBulked; } else { queueRemoteRevision(rev); } rev.setSequence(pendingSequences.addValue(rev.getRemoteSequenceID())); } pullRemoteRevisions(); } /** * Start up some HTTP GETs, within our limit on the maximum simultaneous number * <p/> * The entire method is not synchronized, only the portion pulling work off the list * Important to not hold the synchronized block while we do network access */ @InterfaceAudience.Private public void 
pullRemoteRevisions() { //find the work to be done in a synchronized block List<RevisionInternal> workToStartNow = new ArrayList<RevisionInternal>(); List<RevisionInternal> bulkWorkToStartNow = new ArrayList<RevisionInternal>(); while (httpConnectionCount + workToStartNow.size() < MAX_OPEN_HTTP_CONNECTIONS) { int nBulk = 0; if (bulkRevsToPull != null) { nBulk = (bulkRevsToPull.size() < MAX_REVS_TO_GET_IN_BULK) ? bulkRevsToPull.size() : MAX_REVS_TO_GET_IN_BULK; } if (nBulk == 1) { // Rather than pulling a single revision in 'bulk', just pull it normally: queueRemoteRevision(bulkRevsToPull.get(0)); bulkRevsToPull.remove(0); nBulk = 0; } if (nBulk > 0) { // Prefer to pull bulk revisions: bulkWorkToStartNow.addAll(bulkRevsToPull.subList(0, nBulk)); bulkRevsToPull.subList(0, nBulk).clear(); } else { // Prefer to pull an existing revision over a deleted one: List<RevisionInternal> queue = revsToPull; if (queue == null || queue.size() == 0) { queue = deletedRevsToPull; if (queue == null || queue.size() == 0) break; // both queues are empty } workToStartNow.add(queue.get(0)); queue.remove(0); } } //actually run it outside the synchronized block if(bulkWorkToStartNow.size() > 0) { pullBulkRevisions(bulkWorkToStartNow); } for (RevisionInternal work : workToStartNow) { pullRemoteRevision(work); } } // Get a bunch of revisions in one bulk request. Will use _bulk_get if possible. 
protected void pullBulkRevisions(List<RevisionInternal> bulkRevs) { int nRevs = bulkRevs.size(); if (nRevs == 0) { return; } Log.v(Log.TAG_SYNC, "%s bulk-fetching %d remote revisions...", this, nRevs); Log.v(Log.TAG_SYNC, "%s bulk-fetching remote revisions: %s", this, bulkRevs); if (!canBulkGet) { pullBulkWithAllDocs(bulkRevs); return; } Log.v(Log.TAG_SYNC, "%s: POST _bulk_get", this); final List<RevisionInternal> remainingRevs = new ArrayList<RevisionInternal>(bulkRevs); ++httpConnectionCount; final BulkDownloader dl; try { dl = new BulkDownloader(workExecutor, clientFactory, remote, bulkRevs, db, this.requestHeaders, new BulkDownloader.BulkDownloaderDocumentBlock() { public void onDocument(Map<String, Object> props) { // Got a revision! // Find the matching revision in 'remainingRevs' and get its sequence: RevisionInternal rev; if (props.get("_id") != null) { rev = new RevisionInternal(props, db); } else { rev = new RevisionInternal((String) props.get("id"), (String) props.get("rev"), false, db); } int pos = remainingRevs.indexOf(rev); if (pos > -1) { rev.setSequence(remainingRevs.get(pos).getSequence()); remainingRevs.remove(pos); } else { Log.w(Log.TAG_SYNC, "%s : Received unexpected rev rev", this); } if (props.get("_id") != null) { // Add to batcher ... eventually it will be fed to -insertRevisions:. 
queueDownloadedRevision(rev); } else { Status status = statusFromBulkDocsResponseItem(props); error = new CouchbaseLiteException(status); revisionFailed(rev, error); } } }, new RemoteRequestCompletionBlock() { public void onCompletion(HttpResponse httpResponse, Object result, Throwable e) { // The entire _bulk_get is finished: if (e != null) { setError(e); revisionFailed(); completedChangesCount.addAndGet(remainingRevs.size()); } --httpConnectionCount; // Start another task if there are still revisions waiting to be pulled: pullRemoteRevisions(); } } ); } catch (Exception e) { Log.e(Log.TAG_SYNC, "%s: pullBulkRevisions Exception: %s", this, e); return; } dl.setAuthenticator(getAuthenticator()); Future future = remoteRequestExecutor.submit(dl); pendingFutures.add(future); } // This invokes the tranformation block if one is installed and queues the resulting CBL_Revision private void queueDownloadedRevision(RevisionInternal rev) { if (revisionBodyTransformationBlock != null) { // Add 'file' properties to attachments pointing to their bodies: for (Map.Entry<String, Map<String, Object>> entry : ((Map<String, Map<String, Object>>) rev.getProperties().get("_attachments")).entrySet()) { String name = entry.getKey(); Map<String, Object> attachment = entry.getValue(); attachment.remove("file"); if (attachment.get("follows") != null && attachment.get("data") == null) { String filePath = db.fileForAttachmentDict(attachment).getPath(); if (filePath != null) attachment.put("file", filePath); } } RevisionInternal xformed = transformRevision(rev); if (xformed == null) { Log.v(Log.TAG_SYNC, "%s: Transformer rejected revision %s", this, rev); pendingSequences.removeSequence(rev.getSequence()); lastSequence = pendingSequences.getCheckpointedValue(); return; } rev = xformed; // Clean up afterwards Map<String, Object> attachments = (Map<String, Object>) rev.getProperties().get("_attachments"); for (Map.Entry<String, Map<String, Object>> entry : ((Map<String, Map<String, Object>>) 
rev.getProperties().get("_attachments")).entrySet()) { Map<String, Object> attachment = entry.getValue(); attachment.remove("file"); } } //TODO: rev.getBody().compact(); downloadsToInsert.queueObject(rev); } // Get as many revisions as possible in one _all_docs request. // This is compatible with CouchDB, but it only works for revs of generation 1 without attachments. protected void pullBulkWithAllDocs(final List<RevisionInternal> bulkRevs) { // http://wiki.apache.org/couchdb/HTTP_Bulk_Document_API ++httpConnectionCount; final RevisionList remainingRevs = new RevisionList(bulkRevs); Collection<String> keys = CollectionUtils.transform(bulkRevs, new CollectionUtils.Functor<RevisionInternal, String>() { public String invoke(RevisionInternal rev) { return rev.getDocId(); } } ); Map<String, Object> body = new HashMap<String, Object>(); body.put("keys", keys); Future future = sendAsyncRequest("POST", "/_all_docs?include_docs=true", body, new RemoteRequestCompletionBlock() { public void onCompletion(HttpResponse httpResponse, Object result, Throwable e) { Map<String, Object> res = (Map<String, Object>) result; if (e != null) { setError(e); revisionFailed(); // TODO: There is a known bug caused by the line below, which is // TODO: causing testMockSinglePullCouchDb to fail when running on a Nexus5 device. // TODO: (the batching behavior is different in that case) // TODO: See https://github.com/couchbase/couchbase-lite-java-core/issues/271 // completedChangesCount.addAndGet(bulkRevs.size()); } else { // Process the resulting rows' documents. // We only add a document if it doesn't have attachments, and if its // revID matches the one we asked for. 
List<Map<String, Object>> rows = (List<Map<String, Object>>) res.get("rows"); Log.v(Log.TAG_SYNC, "%s checking %d bulk-fetched remote revisions", this, rows.size()); for (Map<String, Object> row : rows) { Map<String, Object> doc = (Map<String, Object>) row.get("doc"); if (doc != null && doc.get("_attachments") == null) { RevisionInternal rev = new RevisionInternal(doc, db); RevisionInternal removedRev = remainingRevs.removeAndReturnRev(rev); if (removedRev != null) { rev.setSequence(removedRev.getSequence()); queueDownloadedRevision(rev); } } else { Status status = statusFromBulkDocsResponseItem(row); if (status.isError() && row.containsKey("key") && row.get("key") != null) { RevisionInternal rev = remainingRevs.revWithDocId((String)row.get("key")); if (rev != null) { remainingRevs.remove(rev); revisionFailed(rev, new CouchbaseLiteException(status)); } } } } } // Any leftover revisions that didn't get matched will be fetched individually: if (remainingRevs.size() > 0) { Log.v(Log.TAG_SYNC, "%s bulk-fetch didn't work for %d of %d revs; getting individually", this, remainingRevs.size(), bulkRevs.size()); for (RevisionInternal rev : remainingRevs) { queueRemoteRevision(rev); } pullRemoteRevisions(); } --httpConnectionCount; // Start another task if there are still revisions waiting to be pulled: pullRemoteRevisions(); } }); pendingFutures.add(future); } /** * This will be called when _revsToInsert fills up: */ @InterfaceAudience.Private public void insertDownloads(List<RevisionInternal> downloads) { Log.i(Log.TAG_SYNC, this + " inserting " + downloads.size() + " revisions..."); long time = System.currentTimeMillis(); Collections.sort(downloads, getRevisionListComparator()); db.beginTransaction(); boolean success = false; try { for (RevisionInternal rev : downloads) { long fakeSequence = rev.getSequence(); List<String> history = db.parseCouchDBRevisionHistory(rev.getProperties()); if (history.isEmpty() && rev.getGeneration() > 1) { Log.w(Log.TAG_SYNC, "%s: Missing 
revision history in response for: %s", this, rev);
                    setError(new CouchbaseLiteException(Status.UPSTREAM_ERROR));
                    revisionFailed();
                    continue;
                }

                Log.v(Log.TAG_SYNC, "%s: inserting %s %s", this, rev.getDocId(), history);

                // Insert the revision
                try {
                    db.forceInsert(rev, history, remote);
                } catch (CouchbaseLiteException e) {
                    if (e.getCBLStatus().getCode() == Status.FORBIDDEN) {
                        // Validation rejections are logged but NOT treated as replication errors.
                        Log.i(Log.TAG_SYNC, "%s: Remote rev failed validation: %s", this, rev);
                    } else {
                        Log.w(Log.TAG_SYNC, "%s: failed to write %s: status=%s", this, rev, e.getCBLStatus().getCode());
                        revisionFailed();
                        setError(new HttpResponseException(e.getCBLStatus().getCode(), null));
                        continue;
                    }
                }

                // Mark this revision's fake sequence as processed:
                pendingSequences.removeSequence(fakeSequence);
            }

            Log.v(Log.TAG_SYNC, "%s: finished inserting %d revisions", this, downloads.size());
            success = true;

        } catch (SQLException e) {
            Log.e(Log.TAG_SYNC, this + ": Exception inserting revisions", e);
        } finally {
            // Commit (or roll back) the whole batch; only advance the checkpoint on success.
            db.endTransaction(success);
            if (success) {

                // Checkpoint:
                setLastSequence(pendingSequences.getCheckpointedValue());

                long delta = System.currentTimeMillis() - time;
                Log.v(Log.TAG_SYNC, "%s: inserted %d revs in %d milliseconds", this, downloads.size(), delta);

                int newCompletedChangesCount = getCompletedChangesCount().get() + downloads.size();
                Log.d(Log.TAG_SYNC, "%s insertDownloads() updating completedChangesCount from %d -> %d ", this, getCompletedChangesCount().get(), newCompletedChangesCount);

                addToCompletedChangesCount(downloads.size());
            }
        }
    }

    /**
     * Comparator ordering revisions by their (fake) local sequence number, so that
     * insertDownloads() writes them oldest-first.
     */
    @InterfaceAudience.Private
    private Comparator<RevisionInternal> getRevisionListComparator() {
        return new Comparator<RevisionInternal>() {
            public int compare(RevisionInternal reva, RevisionInternal revb) {
                return Misc.TDSequenceCompare(reva.getSequence(), revb.getSequence());
            }
        };
    }

    /**
     * Records a failed revision pull. Transient errors just mark the replication as
     * having failed revisions (so it can retry later); permanent errors give up on this
     * revision and release its pending sequence so the checkpoint can advance past it.
     * In both cases the revision is counted as "completed".
     */
    private void revisionFailed(RevisionInternal rev, Throwable throwable) {
        if (Utils.isTransientError(throwable)) {
            revisionFailed(); // retry later
        } else {
            Log.v(Log.TAG_SYNC, "%s: giving up on %s: %s", this, rev, throwable);
            pendingSequences.removeSequence(rev.getSequence());
        }
        // NOTE(review): increments the counter directly instead of going through
        // addToCompletedChangesCount(1) as insertDownloads() does — confirm intentional.
        completedChangesCount.getAndIncrement();
    }

    /**
     * Fetches the contents of a revision from the remote db, including its parent revision ID.
     * The contents are stored into rev.properties.
     */
    @InterfaceAudience.Private
    public void pullRemoteRevision(final RevisionInternal rev) {

        Log.d(Log.TAG_SYNC, "%s: pullRemoteRevision with rev: %s", this, rev);

        ++httpConnectionCount;

        // Construct a query. We want the revision history, and the bodies of attachments that have
        // been added since the latest revisions we have locally.
        // See: http://wiki.apache.org/couchdb/HTTP_Document_API#Getting_Attachments_With_a_Document
        // NOTE(review): the single-argument URLEncoder.encode(String) is deprecated and
        // uses the platform default charset — consider encode(s, "UTF-8").
        StringBuilder path = new StringBuilder("/" + URLEncoder.encode(rev.getDocId()) + "?rev=" + URLEncoder.encode(rev.getRevId()) + "&revs=true&attachments=true");
        List<String> knownRevs = knownCurrentRevIDs(rev);
        if (knownRevs == null) {
            Log.w(Log.TAG_SYNC, "knownRevs == null, something is wrong, possibly the replicator has shut down");
            --httpConnectionCount;
            return;
        }
        if (knownRevs.size() > 0) {
            path.append("&atts_since=");
            path.append(joinQuotedEscaped(knownRevs));
        }

        //create a final version of this variable for the log statement inside
        //FIXME find a way to avoid this
        final String pathInside = path.toString();

        Future future = sendAsyncMultipartDownloaderRequest("GET", pathInside, null, db, new RemoteRequestCompletionBlock() {

            @Override
            public void onCompletion(HttpResponse httpResponse, Object result, Throwable e) {

                if (e != null) {
                    Log.e(Log.TAG_SYNC, "Error pulling remote revision", e);
                    revisionFailed(rev, e);
                } else {
                    Map<String, Object> properties = (Map<String, Object>) result;
                    PulledRevision gotRev = new PulledRevision(properties, db);
                    // Carry the fake local sequence over so insertDownloads() can check it off.
                    gotRev.setSequence(rev.getSequence());
                    // Add to batcher ... eventually it will be fed to -insertDownloads:.
                    // TODO: [gotRev.body compact];

                    Log.d(Log.TAG_SYNC, "%s: pullRemoteRevision add rev: %s to batcher: %s", PullerInternal.this, gotRev, downloadsToInsert);
                    downloadsToInsert.queueObject(gotRev);
                }

                // Note that we've finished this task; then start another one if there
                // are still revisions waiting to be pulled:
                --httpConnectionCount;
                pullRemoteRevisions();
            }
        });
        pendingFutures.add(future);
    }

    /**
     * JSON-serializes a list of strings and URL-encodes the result, for use as the
     * value of the "?atts_since=" query parameter.
     *
     * NOTE(review): if writeValueAsBytes() throws, {@code json} stays null and
     * {@code new String(json)} below will throw NullPointerException — the catch
     * only logs and falls through.
     */
    @InterfaceAudience.Private
    public String joinQuotedEscaped(List<String> strings) {
        if (strings.size() == 0) {
            return "[]";
        }
        byte[] json = null;
        try {
            json = Manager.getObjectMapper().writeValueAsBytes(strings);
        } catch (Exception e) {
            Log.w(Log.TAG_SYNC, "Unable to serialize json", e);
        }
        return URLEncoder.encode(new String(json));
    }

    /**
     * Returns every revision ID the local database knows for this document
     * (only current/leaf revs), or null if the database reference has gone away.
     * NOTE(review): MAX_NUMBER_OF_ATTS_SINCE is declared above but the list is not
     * truncated here — confirm whether capping was intended.
     */
    @InterfaceAudience.Private
    /* package */ List<String> knownCurrentRevIDs(RevisionInternal rev) {
        if (db != null) {
            return db.getAllRevisionsOfDocumentID(rev.getDocId(), true).getAllRevIds();
        }
        return null;
    }

    /**
     * Add a revision to the appropriate queue of revs to individually GET
     */
    @InterfaceAudience.Private
    protected void queueRemoteRevision(RevisionInternal rev) {
        if (rev.isDeleted()) {
            // Deleted revs go to their own queue; pullRemoteRevisions() drains the
            // live-revision queue first.
            if (deletedRevsToPull == null) {
                deletedRevsToPull = new ArrayList<RevisionInternal>(100);
            }
            deletedRevsToPull.add(rev);
        } else {
            if (revsToPull == null)
                revsToPull = new ArrayList<RevisionInternal>(100);
            revsToPull.add(rev);
        }
    }

    /**
     * Lazily creates the pending-sequence map and seeds it with the last checkpointed
     * sequence, so getCheckpointedValue() starts out equal to the stored checkpoint.
     */
    private void initPendingSequences() {

        if (pendingSequences == null) {
            pendingSequences = new SequenceMap();
            if (getLastSequence() != null) {
                // Prime _pendingSequences so its checkpointedValue will reflect the last known seq:
                long seq = pendingSequences.addValue(getLastSequence());
                pendingSequences.removeSequence(seq);
                assert (pendingSequences.getCheckpointedValue().equals(getLastSequence()));
            }
        }
    }

    /**
     * @exclude
     */
    @InterfaceAudience.Private
    public String getLastSequence() {
        return lastSequence;
    }

    /** Returns an HttpClient from the factory; used by the ChangeTracker. */
    @Override
    public HttpClient getHttpClient() {
        HttpClient httpClient = this.clientFactory.getHttpClient();
        return httpClient;
    }

    @Override
public void changeTrackerReceivedChange(final Map<String, Object> change) {
        // this callback will be on the changetracker thread, but we need
        // to do the work on the replicator thread.
        workExecutor.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    Log.d(Log.TAG_SYNC, "changeTrackerReceivedChange: %s", change);
                    processChangeTrackerChange(change);
                } catch (Exception e) {
                    Log.e(Log.TAG_SYNC, "Error processChangeTrackerChange(): %s", e);
                    e.printStackTrace();
                    throw new RuntimeException(e);
                }
            }
        });
    }

    /**
     * Turns one _changes-feed entry into PulledRevision objects and queues them in
     * the inbox. Runs on the replicator thread. Entries with no doc ID, an invalid
     * doc ID, or a missing "rev" are skipped silently.
     */
    protected void processChangeTrackerChange(final Map<String, Object> change) {
        String lastSequence = change.get("seq").toString();
        String docID = (String) change.get("id");
        if (docID == null) {
            return;
        }

        if (!Database.isValidDocumentId(docID)) {
            Log.w(Log.TAG_SYNC, "%s: Received invalid doc ID from _changes: %s", this, change);
            return;
        }
        boolean deleted = (change.containsKey("deleted") && ((Boolean) change.get("deleted")).equals(Boolean.TRUE));
        List<Map<String, Object>> changes = (List<Map<String, Object>>) change.get("changes");
        for (Map<String, Object> changeDict : changes) {
            String revID = (String) changeDict.get("rev");
            if (revID == null) {
                continue;
            }

            PulledRevision rev = new PulledRevision(docID, revID, deleted, db);
            rev.setRemoteSequenceID(lastSequence);

            Log.d(Log.TAG_SYNC, "%s: adding rev to inbox %s", this, rev);

            Log.v(Log.TAG_SYNC, "%s: changeTrackerReceivedChange() incrementing changesCount by 1", this);

            // this is purposefully done slightly different than the ios version
            addToChangesCount(1);

            addToInbox(rev);
        }
    }

    @Override
    public void changeTrackerStopped(ChangeTracker tracker) {
        // this callback will be on the changetracker thread, but we need
        // to do the work on the replicator thread.
        // NOTE(review): the `tracker` parameter is ignored; the field `changeTracker`
        // is passed instead — confirm these are always the same instance.
        workExecutor.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    processChangeTrackerStopped(changeTracker);
                } catch (RuntimeException e) {
                    e.printStackTrace();
                    throw e;
                }
            }
        });
    }

    /**
     * Reacts to the change tracker stopping, depending on the replication lifecycle:
     * one-shot replications begin a graceful stop; continuous replications either do
     * nothing (if going offline) or schedule a change-tracker restart after
     * CHANGE_TRACKER_RESTART_DELAY_MS.
     */
    private void processChangeTrackerStopped(ChangeTracker tracker) {
        Log.d(Log.TAG_SYNC, "changeTrackerStopped. lifecycle: %s", lifecycle);
        switch (lifecycle) {
            case ONESHOT:
                Log.d(Log.TAG_SYNC, "fire STOP_GRACEFUL");
                stateMachine.fire(ReplicationTrigger.STOP_GRACEFUL); // TODO: call triggerStop(); instead of this, just to be more consistent
                break;
            case CONTINUOUS:
                if (stateMachine.isInState(ReplicationState.OFFLINE)) {
                    // in this case, we don't want to do anything here, since
                    // we told the change tracker to go offline ..
                    Log.d(Log.TAG_SYNC, "Change tracker stopped because we are going offline");
                } else {
                    // otherwise, try to restart the change tracker, since it should
                    // always be running in continuous replications
                    String msg = String.format("Change tracker stopped during continuous replication");
                    Log.e(Log.TAG_SYNC, msg);
                    parentReplication.setLastError(new Exception(msg));
                    fireTrigger(ReplicationTrigger.WAITING_FOR_CHANGES);
                    Log.d(Log.TAG_SYNC, "Scheduling change tracker restart in %d ms", CHANGE_TRACKER_RESTART_DELAY_MS);
                    workExecutor.schedule(new Runnable() {
                        @Override
                        public void run() {
                            // the replication may have been stopped by the time this scheduled fires
                            // so we need to check the state here.
                            // NOTE(review): inside this anonymous Runnable, `this` in the log calls
                            // below refers to the Runnable, not the replicator.
                            if (stateMachine.isInState(ReplicationState.RUNNING)) {
                                Log.d(Log.TAG_SYNC, "%s still running, restarting change tracker", this);
                                startChangeTracker();
                            } else {
                                Log.d(Log.TAG_SYNC, "%s still no longer running, not restarting change tracker", this);
                            }

                        }
                    }, CHANGE_TRACKER_RESTART_DELAY_MS, TimeUnit.MILLISECONDS);
                }
                break;
            default:
                throw new RuntimeException(String.format("Unknown lifecycle: %s", lifecycle));
        }
    }

    @Override
    public void changeTrackerFinished(ChangeTracker tracker) {
        // NOTE(review): the submitted task only logs; the try/catch around a single
        // Log.d call is effectively dead — confirm whether more work was intended here.
        workExecutor.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    Log.d(Log.TAG_SYNC, "changeTrackerFinished");
                } catch (Exception e) {
                    e.printStackTrace();
                    throw new RuntimeException(e);
                }
            }
        });
    }

    @Override
    public void changeTrackerCaughtUp() {
        workExecutor.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    Log.d(Log.TAG_SYNC, "changeTrackerCaughtUp");
                } catch (Exception e) {
                    e.printStackTrace();
                    throw new RuntimeException(e);
                }
            }
        });

        // for continuous replications, once the change tracker is caught up, we
        // should try to go into the idle state.
        if (isContinuous()) {

            // this has to be on a different thread than the replicator thread, or else it's a deadlock
            // because it might be waiting for jobs that have been scheduled, and not
            // yet executed (and which will never execute because this will block processing).
            new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        if (batcher != null) {
                            Log.d(Log.TAG_SYNC, "batcher.waitForPendingFutures()");
                            batcher.waitForPendingFutures();
                        }
                        Log.d(Log.TAG_SYNC, "waitForPendingFutures()");
                        waitForPendingFutures();
                        if (downloadsToInsert != null) {
                            Log.d(Log.TAG_SYNC, "downloadsToInsert.waitForPendingFutures()");
                            downloadsToInsert.waitForPendingFutures();
                        }
                    } catch (Exception e) {
                        Log.e(Log.TAG_SYNC, "Exception waiting for jobs to drain: %s", e);
                        e.printStackTrace();
                    } finally {
                        // Whether or not draining succeeded, signal that we are idle.
                        fireTrigger(ReplicationTrigger.WAITING_FOR_CHANGES);
                    }
                    // NOTE(review): this log message appears copy-pasted from stopGraceful().
                    Log.e(Log.TAG_SYNC, "PullerInternal stopGraceful.run() finished");
                }
            }).start();
        }
    }

    /**
     * Gracefully stops the puller: drains the batchers and pending futures, stops the
     * change tracker, then triggers an immediate stop. Runs on a dedicated thread to
     * avoid deadlocking the replicator's own work executor (see comment below).
     */
    protected void stopGraceful() {
        super.stopGraceful();

        Log.d(Log.TAG_SYNC, "PullerInternal stopGraceful()");

        // this has to be on a different thread than the replicator thread, or else it's a deadlock
        // because it might be waiting for jobs that have been scheduled, and not
        // yet executed (and which will never execute because this will block processing).
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    // stop things and possibly wait for them to stop ..
                    if (batcher != null) {
                        Log.d(Log.TAG_SYNC, "batcher.waitForPendingFutures()");
                        // TODO: should we call batcher.flushAll(); here?
                        batcher.waitForPendingFutures();
                    }
                    Log.d(Log.TAG_SYNC, "waitForPendingFutures()");
                    waitForPendingFutures();
                    if (downloadsToInsert != null) {
                        Log.d(Log.TAG_SYNC, "downloadsToInsert.waitForPendingFutures()");
                        // TODO: should we call downloadsToInsert.flushAll(); here?
                        downloadsToInsert.waitForPendingFutures();
                    }
                    if (changeTracker != null) {
                        Log.d(Log.TAG_SYNC, "stopping change tracker");
                        changeTracker.stop();
                        Log.d(Log.TAG_SYNC, "stopped change tracker");
                    }
                } catch (Exception e) {
                    Log.e(Log.TAG_SYNC, "stopGraceful.run() had exception: %s", e);
                    e.printStackTrace();
                } finally {
                    // Always complete the stop, even if draining failed.
                    triggerStopImmediate();
                }
                Log.e(Log.TAG_SYNC, "PullerInternal stopGraceful.run() finished");
            }
        }).start();
    }

    /**
     * Blocks until every Future queued in pendingFutures has completed.
     * NOTE(review): InterruptedException is caught without restoring the thread's
     * interrupt status (Thread.currentThread().interrupt()) — confirm intentional.
     */
    public void waitForPendingFutures() {
        try {
            while (!pendingFutures.isEmpty()) {
                Future future = pendingFutures.take();
                try {
                    Log.d(Log.TAG_SYNC, "calling future.get() on %s", future);
                    future.get();
                    Log.d(Log.TAG_SYNC, "done calling future.get() on %s", future);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                } catch (ExecutionException e) {
                    e.printStackTrace();
                }
            }
        } catch (Exception e) {
            Log.e(Log.TAG_SYNC, "Exception waiting for pending futures: %s", e);
        }
    }

    /** A pull replication never creates the target database. */
    @Override
    public boolean shouldCreateTarget() {
        return false;
    };

    @Override
    public void setCreateTarget(boolean createTarget) {
        // silently ignore this -- doesn't make sense for pull replicator
    };

    @Override
    protected void goOffline() {
        super.goOffline();

        // stop change tracker
        if (changeTracker != null) {
            changeTracker.stop();
        }

        // TODO: stop remote requests in progress, but first
        // TODO: write a test that verifies this actually works
    }

    @Override
    protected void goOnline() {
        super.goOnline();

        // start change tracker
        beginReplicating();
    }
}
src/main/java/com/couchbase/lite/replicator/PullerInternal.java
package com.couchbase.lite.replicator;

import com.couchbase.lite.CouchbaseLiteException;
import com.couchbase.lite.Database;
import com.couchbase.lite.Manager;
import com.couchbase.lite.Misc;
import com.couchbase.lite.RevisionList;
import com.couchbase.lite.Status;
import com.couchbase.lite.internal.InterfaceAudience;
import com.couchbase.lite.internal.RevisionInternal;
import com.couchbase.lite.storage.SQLException;
import com.couchbase.lite.support.BatchProcessor;
import com.couchbase.lite.support.Batcher;
import com.couchbase.lite.support.HttpClientFactory;
import com.couchbase.lite.support.RemoteRequestCompletionBlock;
import com.couchbase.lite.support.SequenceMap;
import com.couchbase.lite.util.CollectionUtils;
import com.couchbase.lite.util.Log;
import com.couchbase.lite.util.Utils;

import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.HttpResponseException;

import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

/**
 * Pull Replication
 *
 * @exclude
 */
@InterfaceAudience.Private
public class PullerInternal extends ReplicationInternal implements ChangeTrackerClient{

    // Cap on simultaneous revision-download HTTP requests (see pullRemoteRevisions()).
    private static final int MAX_OPEN_HTTP_CONNECTIONS = 16;

    // Maximum number of revs to fetch in a single bulk request
    public static final int MAX_REVS_TO_GET_IN_BULK = 50;

    // Maximum number of revision IDs to pass in an "?atts_since=" query param
    public static final int MAX_NUMBER_OF_ATTS_SINCE = 50;

    // NOTE(review): public, mutable, non-final — presumably so tests can shorten the
    // restart delay; confirm before making final.
    public static int CHANGE_TRACKER_RESTART_DELAY_MS = 10 * 1000;

    // Feed of the remote database's _changes stream.
    private ChangeTracker changeTracker;

    // Maps remote sequence IDs to local fake sequences so the checkpoint only
    // advances past fully-processed revisions (see initPendingSequences()).
    protected SequenceMap pendingSequences;

    protected Boolean canBulkGet;  // Does the server support _bulk_get requests?

    // Queues of individual revisions awaiting download (lazily allocated).
    protected List<RevisionInternal> revsToPull;
    protected List<RevisionInternal> bulkRevsToPull;
    protected List<RevisionInternal> deletedRevsToPull;

    // Count of in-flight revision-download requests, bounded by MAX_OPEN_HTTP_CONNECTIONS.
    protected int httpConnectionCount;

    // Batches downloaded revisions before handing them to insertDownloads().
    protected Batcher<RevisionInternal> downloadsToInsert;

    public PullerInternal(Database db, URL remote, HttpClientFactory clientFactory, ScheduledExecutorService workExecutor, Replication.Lifecycle lifecycle, Replication parentReplication) {
        super(db, remote, clientFactory, workExecutor, lifecycle, parentReplication);
    }

    /**
     * Actual work of starting the replication process.
     */
    protected void beginReplicating() {
        // NOTE(review): log text says "startReplicating()" but the method is
        // beginReplicating() — harmless, just misleading in logs.
        Log.d(Log.TAG_SYNC, "startReplicating()");

        initPendingSequences();

        initDownloadsToInsert();

        startChangeTracker();

        // start replicator ..
    }

    /**
     * Lazily creates the batcher that accumulates downloaded revisions
     * (capacity 200, 1000 ms flush delay) and feeds them to insertDownloads().
     */
    private void initDownloadsToInsert() {
        if (downloadsToInsert == null) {
            int capacity = 200;
            int delay = 1000;
            downloadsToInsert = new Batcher<RevisionInternal>(workExecutor, capacity, delay, new BatchProcessor<RevisionInternal>() {
                @Override
                public void process(List<RevisionInternal> inbox) {
                    insertDownloads(inbox);
                }
            });
        }
    }

    public boolean isPull() {
        return true;
    }

    /* package */ void maybeCreateRemoteDB() {
        // puller never needs to do this
    }

    /**
     * Creates, configures and starts the ChangeTracker for the remote _changes feed,
     * carrying over filter name/params, doc IDs, request headers, and continuous mode.
     */
    protected void startChangeTracker() {

        ChangeTracker.ChangeTrackerMode changeTrackerMode;

        // it always starts out as OneShot, but if its a continuous replication
        // it will switch to longpoll later.
        changeTrackerMode = ChangeTracker.ChangeTrackerMode.OneShot;

        Log.w(Log.TAG_SYNC, "%s: starting ChangeTracker with since=%s mode=%s", this, lastSequence, changeTrackerMode);
        changeTracker = new ChangeTracker(remote, changeTrackerMode, true, lastSequence, this);
        changeTracker.setAuthenticator(getAuthenticator());
        Log.w(Log.TAG_SYNC, "%s: started ChangeTracker %s", this, changeTracker);

        if (filterName != null) {
            changeTracker.setFilterName(filterName);
            if (filterParams != null) {
                changeTracker.setFilterParams(filterParams);
            }
        }
        changeTracker.setDocIDs(documentIDs);
        changeTracker.setRequestHeaders(requestHeaders);
        changeTracker.setContinuous(lifecycle == Replication.Lifecycle.CONTINUOUS);

        // Sync Gateway 0.93+ supports POSTing to _changes instead of GET.
        changeTracker.setUsePOST(serverIsSyncGatewayVersion("0.93"));
        changeTracker.start();
    }

    /**
     * Process a bunch of remote revisions from the _changes feed at once
     */
    @Override
    @InterfaceAudience.Private
    protected void processInbox(RevisionList inbox) {

        Log.d(Log.TAG_SYNC, "processInbox called");

        if (canBulkGet == null) {
            // Sync Gateway 0.81+ supports the _bulk_get endpoint.
            canBulkGet = serverIsSyncGatewayVersion("0.81");
        }

        // Ask the local database which of the revs are not known to it:
        String lastInboxSequence = ((PulledRevision) inbox.get(inbox.size() - 1)).getRemoteSequenceID();

        int numRevisionsRemoved = 0;
        try {
            // findMissingRevisions is the local equivalent of _revs_diff. it looks at the
            // array of revisions in ‘inbox’ and removes the ones that already exist. So whatever’s left in ‘inbox’
            // afterwards are the revisions that need to be downloaded.
numRevisionsRemoved = db.findMissingRevisions(inbox);
        } catch (SQLException e) {
            Log.e(Log.TAG_SYNC, String.format("%s failed to look up local revs", this), e);
            // On failure, treat the inbox as empty so we just advance the checkpoint.
            inbox = null;
        }

        //introducing this to java version since inbox may now be null everywhere
        int inboxCount = 0;
        if (inbox != null) {
            inboxCount = inbox.size();
        }

        if (numRevisionsRemoved > 0) {
            Log.v(Log.TAG_SYNC, "%s: processInbox() setting changesCount to: %s", this, getChangesCount().get() - numRevisionsRemoved);
            // May decrease the changesCount, to account for the revisions we just found out we don’t need to get.
            addToChangesCount(-1 * numRevisionsRemoved);
        }

        if (inboxCount == 0) {
            // Nothing to do. Just bump the lastSequence.
            Log.w(Log.TAG_SYNC, "%s no new remote revisions to fetch. add lastInboxSequence (%s) to pendingSequences (%s)", this, lastInboxSequence, pendingSequences);
            long seq = pendingSequences.addValue(lastInboxSequence);
            pendingSequences.removeSequence(seq);
            setLastSequence(pendingSequences.getCheckpointedValue());
            return;
        }

        Log.v(Log.TAG_SYNC, "%s: fetching %s remote revisions...", this, inboxCount);

        // Dump the revs into the queue of revs to pull from the remote db:
        int numBulked = 0;
        for (int i = 0; i < inbox.size(); i++) {
            PulledRevision rev = (PulledRevision) inbox.get(i);
            //TODO: add support for rev isConflicted
            if (canBulkGet || (rev.getGeneration() == 1 && !rev.isDeleted())) { // &&!rev.isConflicted)
                //optimistically pull 1st-gen revs in bulk
                if (bulkRevsToPull == null)
                    bulkRevsToPull = new ArrayList<RevisionInternal>(100);
                bulkRevsToPull.add(rev);
                ++numBulked;
            } else {
                queueRemoteRevision(rev);
            }
            // Register the remote sequence so the checkpoint can't pass it until done.
            rev.setSequence(pendingSequences.addValue(rev.getRemoteSequenceID()));
        }

        pullRemoteRevisions();
    }

    /**
     * Start up some HTTP GETs, within our limit on the maximum simultaneous number
     * <p/>
     * The entire method is not synchronized, only the portion pulling work off the list
     * Important to not hold the synchronized block while we do network access
     */
    @InterfaceAudience.Private
    public void pullRemoteRevisions() {
        //find the work to be done in a synchronized block
        // NOTE(review): despite the Javadoc and comments, there is no synchronized
        // block in this method — confirm whether synchronization was lost in a port.
        List<RevisionInternal> workToStartNow = new ArrayList<RevisionInternal>();
        List<RevisionInternal> bulkWorkToStartNow = new ArrayList<RevisionInternal>();
        while (httpConnectionCount + workToStartNow.size() < MAX_OPEN_HTTP_CONNECTIONS) {
            int nBulk = 0;
            if (bulkRevsToPull != null) {
                nBulk = (bulkRevsToPull.size() < MAX_REVS_TO_GET_IN_BULK) ? bulkRevsToPull.size() : MAX_REVS_TO_GET_IN_BULK;
            }
            if (nBulk == 1) {
                // Rather than pulling a single revision in 'bulk', just pull it normally:
                queueRemoteRevision(bulkRevsToPull.get(0));
                bulkRevsToPull.remove(0);
                nBulk = 0;
            }
            if (nBulk > 0) {
                // Prefer to pull bulk revisions:
                bulkWorkToStartNow.addAll(bulkRevsToPull.subList(0, nBulk));
                bulkRevsToPull.subList(0, nBulk).clear();
            } else {
                // Prefer to pull an existing revision over a deleted one:
                List<RevisionInternal> queue = revsToPull;
                if (queue == null || queue.size() == 0) {
                    queue = deletedRevsToPull;
                    if (queue == null || queue.size() == 0)
                        break; // both queues are empty
                }
                workToStartNow.add(queue.get(0));
                queue.remove(0);
            }
        }

        //actually run it outside the synchronized block
        if(bulkWorkToStartNow.size() > 0) {
            pullBulkRevisions(bulkWorkToStartNow);
        }

        for (RevisionInternal work : workToStartNow) {
            pullRemoteRevision(work);
        }
    }

    // Get a bunch of revisions in one bulk request. Will use _bulk_get if possible.
    protected void pullBulkRevisions(List<RevisionInternal> bulkRevs) {

        int nRevs = bulkRevs.size();
        if (nRevs == 0) {
            return;
        }
        Log.v(Log.TAG_SYNC, "%s bulk-fetching %d remote revisions...", this, nRevs);
        Log.v(Log.TAG_SYNC, "%s bulk-fetching remote revisions: %s", this, bulkRevs);

        // Fall back to _all_docs for servers without _bulk_get (e.g. plain CouchDB).
        // NOTE(review): canBulkGet is a Boolean; it is set in processInbox() before
        // this path is reached, but unboxing here would NPE if that ever changes.
        if (!canBulkGet) {
            pullBulkWithAllDocs(bulkRevs);
            return;
        }

        Log.v(Log.TAG_SYNC, "%s: POST _bulk_get", this);
        final List<RevisionInternal> remainingRevs = new ArrayList<RevisionInternal>(bulkRevs);

        ++httpConnectionCount;

        final BulkDownloader dl;
        try {

            dl = new BulkDownloader(workExecutor,
                    clientFactory,
                    remote,
                    bulkRevs,
                    db,
                    this.requestHeaders,
                    new BulkDownloader.BulkDownloaderDocumentBlock() {
                        public void onDocument(Map<String, Object> props) {
                            // Got a revision!
                            // Find the matching revision in 'remainingRevs' and get its sequence:
                            RevisionInternal rev;
                            if (props.get("_id") != null) {
                                rev = new RevisionInternal(props, db);
                            } else {
                                rev = new RevisionInternal((String) props.get("id"), (String) props.get("rev"), false, db);
                            }

                            int pos = remainingRevs.indexOf(rev);
                            if (pos > -1) {
                                rev.setSequence(remainingRevs.get(pos).getSequence());
                                remainingRevs.remove(pos);
                            } else {
                                // NOTE(review): "rev rev" in this message looks like a
                                // duplicated word (runtime string — left untouched here).
                                Log.w(Log.TAG_SYNC, "%s : Received unexpected rev rev", this);
                            }

                            if (props.get("_id") != null) {
                                // Add to batcher ... eventually it will be fed to -insertRevisions:.
                                queueDownloadedRevision(rev);
                            } else {
                                // A props map without "_id" is an error stub for this rev.
                                Status status = statusFromBulkDocsResponseItem(props);
                                error = new CouchbaseLiteException(status);
                                revisionFailed(rev, error);
                            }
                        }
                    },
                    new RemoteRequestCompletionBlock() {

                        public void onCompletion(HttpResponse httpResponse, Object result, Throwable e) {
                            // The entire _bulk_get is finished:
                            if (e != null) {
                                setError(e);
                                revisionFailed();
                                // Count every rev that never arrived as completed.
                                completedChangesCount.addAndGet(remainingRevs.size());
                            }

                            --httpConnectionCount;
                            // Start another task if there are still revisions waiting to be pulled:
                            pullRemoteRevisions();
                        }
                    }
            );
        } catch (Exception e) {
            Log.e(Log.TAG_SYNC, "%s: pullBulkRevisions Exception: %s", this, e);
            return;
        }

        dl.setAuthenticator(getAuthenticator());

        Future future = remoteRequestExecutor.submit(dl);
        pendingFutures.add(future);
    }

    // This invokes the tranformation block if one is installed and queues the resulting CBL_Revision
    private void queueDownloadedRevision(RevisionInternal rev) {

        if (revisionBodyTransformationBlock != null) {
            // Add 'file' properties to attachments pointing to their bodies:

            for (Map.Entry<String, Map<String, Object>> entry : ((Map<String, Map<String, Object>>) rev.getProperties().get("_attachments")).entrySet()) {
                // NOTE(review): local `name` is never used.
                String name = entry.getKey();
                Map<String, Object> attachment = entry.getValue();
                attachment.remove("file");
                if (attachment.get("follows") != null && attachment.get("data") == null) {
                    String filePath = db.fileForAttachmentDict(attachment).getPath();
                    if (filePath != null)
                        attachment.put("file", filePath);
                }
            }

            RevisionInternal xformed = transformRevision(rev);
            if (xformed == null) {
                // Transformer rejected the revision: drop it and let the checkpoint move on.
                Log.v(Log.TAG_SYNC, "%s: Transformer rejected revision %s", this, rev);
                pendingSequences.removeSequence(rev.getSequence());
                lastSequence = pendingSequences.getCheckpointedValue();
                return;
            }
            rev = xformed;

            // Clean up afterwards
            // NOTE(review): local `attachments` is never used.
            Map<String, Object> attachments = (Map<String, Object>) rev.getProperties().get("_attachments");

            for (Map.Entry<String, Map<String, Object>> entry : ((Map<String, Map<String, Object>>) rev.getProperties().get("_attachments")).entrySet()) {
                Map<String, Object> attachment = entry.getValue();
                attachment.remove("file");
            }
        }

        //TODO: rev.getBody().compact();
        downloadsToInsert.queueObject(rev);
    }

    // Get as many revisions as possible in one _all_docs request.
    // This is compatible with CouchDB, but it only works for revs of generation 1 without attachments.
    protected void pullBulkWithAllDocs(final List<RevisionInternal> bulkRevs) {
        // http://wiki.apache.org/couchdb/HTTP_Bulk_Document_API

        ++httpConnectionCount;

        final RevisionList remainingRevs = new RevisionList(bulkRevs);

        Collection<String> keys = CollectionUtils.transform(bulkRevs,
                new CollectionUtils.Functor<RevisionInternal, String>() {
                    public String invoke(RevisionInternal rev) {
                        return rev.getDocId();
                    }
                }
        );

        Map<String, Object> body = new HashMap<String, Object>();
        body.put("keys", keys);

        Future future = sendAsyncRequest("POST",
                "/_all_docs?include_docs=true",
                body,
                new RemoteRequestCompletionBlock() {

                    public void onCompletion(HttpResponse httpResponse, Object result, Throwable e) {

                        Map<String, Object> res = (Map<String, Object>) result;

                        if (e != null) {
                            setError(e);
                            revisionFailed();
                            // TODO: There is a known bug caused by the line below, which is
                            // TODO: causing testMockSinglePullCouchDb to fail when running on a Nexus5 device.
                            // TODO: (the batching behavior is different in that case)
                            // TODO: See https://github.com/couchbase/couchbase-lite-java-core/issues/271
                            // completedChangesCount.addAndGet(bulkRevs.size());
                        } else {
                            // Process the resulting rows' documents.
                            // We only add a document if it doesn't have attachments, and if its
                            // revID matches the one we asked for.
                            List<Map<String, Object>> rows = (List<Map<String, Object>>) res.get("rows");
                            Log.v(Log.TAG_SYNC, "%s checking %d bulk-fetched remote revisions", this, rows.size());

                            for (Map<String, Object> row : rows) {
                                Map<String, Object> doc = (Map<String, Object>) row.get("doc");
                                if (doc != null && doc.get("_attachments") == null) {
                                    RevisionInternal rev = new RevisionInternal(doc, db);
                                    RevisionInternal removedRev = remainingRevs.removeAndReturnRev(rev);
                                    if (removedRev != null) {
                                        rev.setSequence(removedRev.getSequence());
                                        queueDownloadedRevision(rev);
                                    }
                                } else {
                                    Status status = statusFromBulkDocsResponseItem(row);
                                    if (status.isError() && row.containsKey("key") && row.get("key") != null) {
                                        RevisionInternal rev = remainingRevs.revWithDocId((String)row.get("key"));
                                        if (rev != null) {
                                            remainingRevs.remove(rev);
                                            revisionFailed(rev, new CouchbaseLiteException(status));
                                        }
                                    }
                                }
                            }
                        }

                        // Any leftover revisions that didn't get matched will be fetched individually:
                        if (remainingRevs.size() > 0) {
                            Log.v(Log.TAG_SYNC, "%s bulk-fetch didn't work for %d of %d revs; getting individually", this, remainingRevs.size(), bulkRevs.size());
                            for (RevisionInternal rev : remainingRevs) {
                                queueRemoteRevision(rev);
                            }
                            pullRemoteRevisions();
                        }

                        --httpConnectionCount;

                        // Start another task if there are still revisions waiting to be pulled:
                        pullRemoteRevisions();
                    }
                });
        pendingFutures.add(future);
    }

    /**
     * This will be called when _revsToInsert fills up:
     */
    @InterfaceAudience.Private
    public void insertDownloads(List<RevisionInternal> downloads) {

        Log.i(Log.TAG_SYNC, this + " inserting " + downloads.size() + " revisions...");
        long time = System.currentTimeMillis();
        // Insert oldest-first so parents exist before their children.
        Collections.sort(downloads, getRevisionListComparator());

        db.beginTransaction();
        boolean success = false;
        try {
            for (RevisionInternal rev : downloads) {
                long fakeSequence = rev.getSequence();
                List<String> history = db.parseCouchDBRevisionHistory(rev.getProperties());
                if (history.isEmpty() && rev.getGeneration() > 1) {
                    Log.w(Log.TAG_SYNC, "%s: Missing 
revision history in response for: %s", this, rev);
                    setError(new CouchbaseLiteException(Status.UPSTREAM_ERROR));
                    revisionFailed();
                    continue;
                }

                Log.v(Log.TAG_SYNC, "%s: inserting %s %s", this, rev.getDocId(), history);

                // Insert the revision
                try {
                    db.forceInsert(rev, history, remote);
                } catch (CouchbaseLiteException e) {
                    if (e.getCBLStatus().getCode() == Status.FORBIDDEN) {
                        // Validation rejections are logged but not treated as errors.
                        Log.i(Log.TAG_SYNC, "%s: Remote rev failed validation: %s", this, rev);
                    } else {
                        Log.w(Log.TAG_SYNC, "%s: failed to write %s: status=%s", this, rev, e.getCBLStatus().getCode());
                        revisionFailed();
                        setError(new HttpResponseException(e.getCBLStatus().getCode(), null));
                        continue;
                    }
                }

                // Mark this revision's fake sequence as processed:
                pendingSequences.removeSequence(fakeSequence);
            }

            Log.v(Log.TAG_SYNC, "%s: finished inserting %d revisions", this, downloads.size());
            success = true;

        } catch (SQLException e) {
            Log.e(Log.TAG_SYNC, this + ": Exception inserting revisions", e);
        } finally {
            db.endTransaction(success);
            if (success) {

                // Checkpoint:
                setLastSequence(pendingSequences.getCheckpointedValue());

                long delta = System.currentTimeMillis() - time;
                Log.v(Log.TAG_SYNC, "%s: inserted %d revs in %d milliseconds", this, downloads.size(), delta);

                int newCompletedChangesCount = getCompletedChangesCount().get() + downloads.size();
                Log.d(Log.TAG_SYNC, "%s insertDownloads() updating completedChangesCount from %d -> %d ", this, getCompletedChangesCount().get(), newCompletedChangesCount);

                addToCompletedChangesCount(downloads.size());
            }
        }
    }

    /** Orders revisions by their (fake) local sequence for oldest-first insertion. */
    @InterfaceAudience.Private
    private Comparator<RevisionInternal> getRevisionListComparator() {
        return new Comparator<RevisionInternal>() {
            public int compare(RevisionInternal reva, RevisionInternal revb) {
                return Misc.TDSequenceCompare(reva.getSequence(), revb.getSequence());
            }
        };
    }

    /**
     * Records a failed revision pull: transient errors retry later, permanent
     * errors release the pending sequence; the revision is counted as completed.
     */
    private void revisionFailed(RevisionInternal rev, Throwable throwable) {
        if (Utils.isTransientError(throwable)) {
            revisionFailed(); // retry later
        } else {
            Log.v(Log.TAG_SYNC, "%s: giving up on %s: %s", this, rev, throwable);
            pendingSequences.removeSequence(rev.getSequence());
        }
        completedChangesCount.getAndIncrement();
    }

    /**
     * Fetches the contents of a revision from the remote db, including its parent revision ID.
     * The contents are stored into rev.properties.
     */
    @InterfaceAudience.Private
    public void pullRemoteRevision(final RevisionInternal rev) {

        Log.d(Log.TAG_SYNC, "%s: pullRemoteRevision with rev: %s", this, rev);

        ++httpConnectionCount;

        // Construct a query. We want the revision history, and the bodies of attachments that have
        // been added since the latest revisions we have locally.
        // See: http://wiki.apache.org/couchdb/HTTP_Document_API#Getting_Attachments_With_a_Document
        StringBuilder path = new StringBuilder("/" + URLEncoder.encode(rev.getDocId()) + "?rev=" + URLEncoder.encode(rev.getRevId()) + "&revs=true&attachments=true");
        List<String> knownRevs = knownCurrentRevIDs(rev);
        if (knownRevs == null) {
            Log.w(Log.TAG_SYNC, "knownRevs == null, something is wrong, possibly the replicator has shut down");
            --httpConnectionCount;
            return;
        }
        if (knownRevs.size() > 0) {
            path.append("&atts_since=");
            path.append(joinQuotedEscaped(knownRevs));
        }

        //create a final version of this variable for the log statement inside
        //FIXME find a way to avoid this
        final String pathInside = path.toString();

        Future future = sendAsyncMultipartDownloaderRequest("GET", pathInside, null, db, new RemoteRequestCompletionBlock() {

            @Override
            public void onCompletion(HttpResponse httpResponse, Object result, Throwable e) {

                if (e != null) {
                    Log.e(Log.TAG_SYNC, "Error pulling remote revision", e);
                    revisionFailed(rev, e);
                } else {
                    Map<String, Object> properties = (Map<String, Object>) result;
                    PulledRevision gotRev = new PulledRevision(properties, db);
                    gotRev.setSequence(rev.getSequence());
                    // Add to batcher ... eventually it will be fed to -insertDownloads:.
                    // TODO: [gotRev.body compact];

                    Log.d(Log.TAG_SYNC, "%s: pullRemoteRevision add rev: %s to batcher: %s", PullerInternal.this, gotRev, downloadsToInsert);
                    downloadsToInsert.queueObject(gotRev);
                }

                // Note that we've finished this task; then start another one if there
                // are still revisions waiting to be pulled:
                --httpConnectionCount;
                pullRemoteRevisions();
            }
        });
        pendingFutures.add(future);
    }

    /**
     * JSON-serializes and URL-encodes a list of strings for "?atts_since=".
     * NOTE(review): if serialization throws, json stays null and new String(json)
     * throws NullPointerException.
     */
    @InterfaceAudience.Private
    public String joinQuotedEscaped(List<String> strings) {
        if (strings.size() == 0) {
            return "[]";
        }
        byte[] json = null;
        try {
            json = Manager.getObjectMapper().writeValueAsBytes(strings);
        } catch (Exception e) {
            Log.w(Log.TAG_SYNC, "Unable to serialize json", e);
        }
        return URLEncoder.encode(new String(json));
    }

    /** Returns all known current rev IDs for this doc, or null if db is gone. */
    @InterfaceAudience.Private
    /* package */ List<String> knownCurrentRevIDs(RevisionInternal rev) {
        if (db != null) {
            return db.getAllRevisionsOfDocumentID(rev.getDocId(), true).getAllRevIds();
        }
        return null;
    }

    /**
     * Add a revision to the appropriate queue of revs to individually GET
     */
    @InterfaceAudience.Private
    protected void queueRemoteRevision(RevisionInternal rev) {
        if (rev.isDeleted()) {
            if (deletedRevsToPull == null) {
                deletedRevsToPull = new ArrayList<RevisionInternal>(100);
            }
            deletedRevsToPull.add(rev);
        } else {
            if (revsToPull == null)
                revsToPull = new ArrayList<RevisionInternal>(100);
            revsToPull.add(rev);
        }
    }

    /** Lazily creates the pending-sequence map, seeded with the last checkpoint. */
    private void initPendingSequences() {

        if (pendingSequences == null) {
            pendingSequences = new SequenceMap();
            if (getLastSequence() != null) {
                // Prime _pendingSequences so its checkpointedValue will reflect the last known seq:
                long seq = pendingSequences.addValue(getLastSequence());
                pendingSequences.removeSequence(seq);
                assert (pendingSequences.getCheckpointedValue().equals(getLastSequence()));
            }
        }
    }

    /**
     * @exclude
     */
    @InterfaceAudience.Private
    public String getLastSequence() {
        return lastSequence;
    }

    @Override
    public HttpClient getHttpClient() {
        HttpClient httpClient = this.clientFactory.getHttpClient();
        return httpClient;
    }

    @Override
    public void changeTrackerReceivedChange(final Map<String, Object> change) {
        // this callback will be on the changetracker thread, but we need
        // to do the work on the replicator thread.
        workExecutor.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    Log.d(Log.TAG_SYNC, "changeTrackerReceivedChange: %s", change);
                    processChangeTrackerChange(change);
                } catch (Exception e) {
                    Log.e(Log.TAG_SYNC, "Error processChangeTrackerChange(): %s", e);
                    e.printStackTrace();
                    throw new RuntimeException(e);
                }
            }
        });
    }

    /** Turns one _changes entry into PulledRevisions and queues them in the inbox. */
    protected void processChangeTrackerChange(final Map<String, Object> change) {
        String lastSequence = change.get("seq").toString();
        String docID = (String) change.get("id");
        if (docID == null) {
            return;
        }

        if (!Database.isValidDocumentId(docID)) {
            Log.w(Log.TAG_SYNC, "%s: Received invalid doc ID from _changes: %s", this, change);
            return;
        }
        boolean deleted = (change.containsKey("deleted") && ((Boolean) change.get("deleted")).equals(Boolean.TRUE));
        List<Map<String, Object>> changes = (List<Map<String, Object>>) change.get("changes");
        for (Map<String, Object> changeDict : changes) {
            String revID = (String) changeDict.get("rev");
            if (revID == null) {
                continue;
            }

            PulledRevision rev = new PulledRevision(docID, revID, deleted, db);
            rev.setRemoteSequenceID(lastSequence);

            Log.d(Log.TAG_SYNC, "%s: adding rev to inbox %s", this, rev);

            Log.v(Log.TAG_SYNC, "%s: changeTrackerReceivedChange() incrementing changesCount by 1", this);

            // this is purposefully done slightly different than the ios version
            addToChangesCount(1);

            addToInbox(rev);
        }
    }

    @Override
    public void changeTrackerStopped(ChangeTracker tracker) {
        // this callback will be on the changetracker thread, but we need
        // to do the work on the replicator thread.
        workExecutor.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    processChangeTrackerStopped(changeTracker);
                } catch (RuntimeException e) {
                    e.printStackTrace();
                    throw e;
                }
            }
        });
    }

    /** Handles change-tracker shutdown per lifecycle (one-shot stop vs. restart). */
    private void processChangeTrackerStopped(ChangeTracker tracker) {
        Log.d(Log.TAG_SYNC, "changeTrackerStopped. lifecycle: %s", lifecycle);
        switch (lifecycle) {
            case ONESHOT:
                Log.d(Log.TAG_SYNC, "fire STOP_GRACEFUL");
                stateMachine.fire(ReplicationTrigger.STOP_GRACEFUL); // TODO: call triggerStop(); instead of this, just to be more consistent
                break;
            case CONTINUOUS:
                if (stateMachine.isInState(ReplicationState.OFFLINE)) {
                    // in this case, we don't want to do anything here, since
                    // we told the change tracker to go offline ..
                    Log.d(Log.TAG_SYNC, "Change tracker stopped because we are going offline");
                } else {
                    // otherwise, try to restart the change tracker, since it should
                    // always be running in continuous replications
                    String msg = String.format("Change tracker stopped during continuous replication");
                    Log.e(Log.TAG_SYNC, msg);
                    parentReplication.setLastError(new Exception(msg));
                    fireTrigger(ReplicationTrigger.WAITING_FOR_CHANGES);
                    Log.d(Log.TAG_SYNC, "Scheduling change tracker restart in %d ms", CHANGE_TRACKER_RESTART_DELAY_MS);
                    workExecutor.schedule(new Runnable() {
                        @Override
                        public void run() {
                            // the replication may have been stopped by the time this scheduled fires
                            // so we need to check the state here.
if (stateMachine.isInState(ReplicationState.RUNNING)) { Log.d(Log.TAG_SYNC, "%s still running, restarting change tracker", this); startChangeTracker(); } else { Log.d(Log.TAG_SYNC, "%s still no longer running, not restarting change tracker", this); } } }, CHANGE_TRACKER_RESTART_DELAY_MS, TimeUnit.MILLISECONDS); } break; default: throw new RuntimeException(String.format("Unknown lifecycle: %s", lifecycle)); } } @Override public void changeTrackerFinished(ChangeTracker tracker) { workExecutor.submit(new Runnable() { @Override public void run() { try { Log.d(Log.TAG_SYNC, "changeTrackerFinished"); } catch (Exception e) { e.printStackTrace(); throw new RuntimeException(e); } } }); } @Override public void changeTrackerCaughtUp() { workExecutor.submit(new Runnable() { @Override public void run() { try { Log.d(Log.TAG_SYNC, "changeTrackerCaughtUp"); } catch (Exception e) { e.printStackTrace(); throw new RuntimeException(e); } } }); } protected void stopGraceful() { super.stopGraceful(); Log.d(Log.TAG_SYNC, "PullerInternal stopGraceful()"); // this has to be on a different thread than the replicator thread, or else it's a deadlock // because it might be waiting for jobs that have been scheduled, and not // yet executed (and which will never execute because this will block processing). new Thread(new Runnable() { @Override public void run() { try { // stop things and possibly wait for them to stop .. if (batcher != null) { Log.d(Log.TAG_SYNC, "batcher.waitForPendingFutures()"); // TODO: should we call batcher.flushAll(); here? batcher.waitForPendingFutures(); } Log.d(Log.TAG_SYNC, "waitForPendingFutures()"); waitForPendingFutures(); if (downloadsToInsert != null) { Log.d(Log.TAG_SYNC, "downloadsToInsert.waitForPendingFutures()"); // TODO: should we call downloadsToInsert.flushAll(); here? 
downloadsToInsert.waitForPendingFutures(); } if (changeTracker != null) { Log.d(Log.TAG_SYNC, "stopping change tracker"); changeTracker.stop(); Log.d(Log.TAG_SYNC, "stopped change tracker"); } } catch (Exception e) { Log.e(Log.TAG_SYNC, "stopGraceful.run() had exception: %s", e); e.printStackTrace(); } finally { triggerStopImmediate(); } Log.e(Log.TAG_SYNC, "PullerInternal stopGraceful.run() finished"); } }).start(); } public void waitForPendingFutures() { try { while (!pendingFutures.isEmpty()) { Future future = pendingFutures.take(); try { Log.d(Log.TAG_SYNC, "calling future.get() on %s", future); future.get(); Log.d(Log.TAG_SYNC, "done calling future.get() on %s", future); } catch (InterruptedException e) { e.printStackTrace(); } catch (ExecutionException e) { e.printStackTrace(); } } } catch (Exception e) { Log.e(Log.TAG_SYNC, "Exception waiting for pending futures: %s", e); } } @Override public boolean shouldCreateTarget() { return false; }; @Override public void setCreateTarget(boolean createTarget) { // silently ignore this -- doesn't make sense for pull replicator }; @Override protected void goOffline() { super.goOffline(); // stop change tracker if (changeTracker != null) { changeTracker.stop(); } // TODO: stop remote requests in progress, but first // TODO: write a test that verifies this actually works } @Override protected void goOnline() { super.goOnline(); // start change tracker beginReplicating(); } }
testContinuousPullEntersIdleState now passes https://github.com/couchbase/couchbase-lite-android/issues/445
src/main/java/com/couchbase/lite/replicator/PullerInternal.java
testContinuousPullEntersIdleState now passes
<ide><path>rc/main/java/com/couchbase/lite/replicator/PullerInternal.java <ide> } <ide> } <ide> }); <add> <add> // for continuous replications, once the change tracker is caught up, we <add> // should try to go into the idle state. <add> if (isContinuous()) { <add> <add> // this has to be on a different thread than the replicator thread, or else it's a deadlock <add> // because it might be waiting for jobs that have been scheduled, and not <add> // yet executed (and which will never execute because this will block processing). <add> new Thread(new Runnable() { <add> @Override <add> public void run() { <add> <add> try { <add> <add> if (batcher != null) { <add> Log.d(Log.TAG_SYNC, "batcher.waitForPendingFutures()"); <add> batcher.waitForPendingFutures(); <add> } <add> <add> Log.d(Log.TAG_SYNC, "waitForPendingFutures()"); <add> waitForPendingFutures(); <add> <add> if (downloadsToInsert != null) { <add> Log.d(Log.TAG_SYNC, "downloadsToInsert.waitForPendingFutures()"); <add> downloadsToInsert.waitForPendingFutures(); <add> } <add> <add> } catch (Exception e) { <add> Log.e(Log.TAG_SYNC, "Exception waiting for jobs to drain: %s", e); <add> e.printStackTrace(); <add> <add> } finally { <add> <add> fireTrigger(ReplicationTrigger.WAITING_FOR_CHANGES); <add> } <add> <add> Log.e(Log.TAG_SYNC, "PullerInternal stopGraceful.run() finished"); <add> <add> <add> } <add> }).start(); <add> <add> } <add> <add> <add> <ide> } <ide> <ide> protected void stopGraceful() {
Java
epl-1.0
92e21ad2906ef29769e73e4b197be108565bb304
0
ESSICS/org.csstudio.display.builder,ESSICS/org.csstudio.display.builder,ESSICS/org.csstudio.display.builder,kasemir/org.csstudio.display.builder,ESSICS/org.csstudio.display.builder,kasemir/org.csstudio.display.builder,kasemir/org.csstudio.display.builder,kasemir/org.csstudio.display.builder,ESSICS/org.csstudio.display.builder,kasemir/org.csstudio.display.builder
/******************************************************************************* * Copyright (c) 2015-2018 Oak Ridge National Laboratory. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html *******************************************************************************/ package org.csstudio.display.builder.editor.properties; import static org.csstudio.display.builder.editor.Plugin.logger; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.prefs.BackingStoreException; import java.util.prefs.Preferences; import java.util.stream.Collectors; import org.csstudio.display.builder.model.Widget; import org.csstudio.display.builder.model.WidgetProperty; import org.csstudio.display.builder.model.properties.CommonWidgetProperties; import org.csstudio.display.builder.model.properties.RulesWidgetProperty; import org.csstudio.display.builder.model.properties.ScriptPV; import org.csstudio.display.builder.model.rules.RuleInfo; import org.csstudio.display.builder.model.rules.RuleInfo.ExprInfoString; import org.csstudio.display.builder.model.rules.RuleInfo.ExprInfoValue; import org.csstudio.display.builder.model.rules.RuleInfo.ExpressionInfo; import org.csstudio.display.builder.model.rules.RuleInfo.PropInfo; import org.csstudio.display.builder.model.util.ModelThreadPool; import org.csstudio.display.builder.representation.javafx.AutocompleteMenu; import org.csstudio.display.builder.representation.javafx.JFXUtil; import org.csstudio.display.builder.representation.javafx.Messages; import org.csstudio.display.builder.representation.javafx.PVTableItem; import org.csstudio.display.builder.representation.javafx.PVTableItem.AutoCompletedTableCell; import org.csstudio.display.builder.representation.javafx.ScriptsDialog; 
import org.csstudio.display.builder.representation.javafx.widgets.JFXBaseRepresentation; import org.csstudio.display.builder.util.undo.UndoableActionManager; import org.csstudio.javafx.DialogHelper; import org.csstudio.javafx.LineNumberTableCellFactory; import org.csstudio.javafx.SyntaxHighlightedMultiLineInputDialog; import org.csstudio.javafx.SyntaxHighlightedMultiLineInputDialog.Language; import org.csstudio.javafx.TableHelper; import javafx.application.Platform; import javafx.beans.property.BooleanProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleObjectProperty; import javafx.beans.property.SimpleStringProperty; import javafx.beans.property.StringProperty; import javafx.beans.value.ChangeListener; import javafx.collections.FXCollections; import javafx.collections.ListChangeListener; import javafx.collections.ObservableList; import javafx.geometry.Insets; import javafx.geometry.Orientation; import javafx.geometry.Pos; import javafx.scene.Node; import javafx.scene.control.Button; import javafx.scene.control.ButtonType; import javafx.scene.control.CheckBox; import javafx.scene.control.ComboBox; import javafx.scene.control.Dialog; import javafx.scene.control.Label; import javafx.scene.control.SplitPane; import javafx.scene.control.TableCell; import javafx.scene.control.TableColumn; import javafx.scene.control.TableView; import javafx.scene.control.TableView.TableViewSelectionModel; import javafx.scene.control.TextField; import javafx.scene.control.Tooltip; import javafx.scene.control.cell.CheckBoxTableCell; import javafx.scene.control.cell.PropertyValueFactory; import javafx.scene.control.cell.TextFieldTableCell; import javafx.scene.layout.HBox; import javafx.scene.layout.Pane; import javafx.scene.layout.Priority; import javafx.scene.layout.Region; import javafx.scene.layout.VBox; import javafx.util.converter.DefaultStringConverter; /** Dialog for editing {@link RuleInfo}s * @author Megan Grodowitz * @author Claudio 
Rosati * @author Kay Kasemir */ @SuppressWarnings("nls") public class RulesDialog extends Dialog<List<RuleInfo>> { /** Expression info as property-based item for table */ public abstract static class ExprItem<T> { final protected StringProperty boolExp = new SimpleStringProperty(); final protected SimpleObjectProperty<Node> field = new SimpleObjectProperty<Node>(); final protected List<WidgetPropertyBinding<?,?>> bindings = new ArrayList<>(); public ExprItem(final String boolE, final T valE, final UndoableActionManager undo) { this.boolExp.set(boolE); } public SimpleObjectProperty<Node> fieldProperty() { return field; } public StringProperty boolExpProperty() { return boolExp; } abstract boolean isWidgetProperty(); abstract public ExpressionInfo<T> toExprInfo(); abstract public T getPropVal(); }; public static class ExprItemString extends ExprItem<String> { final protected Widget widget = new Widget("ExprItemString"); final protected WidgetProperty<String> string_prop; protected String internal_prop_val; public ExprItemString(String bool_exp, String prop_val, UndoableActionManager undo) { super(bool_exp, prop_val, undo); internal_prop_val = prop_val; string_prop = CommonWidgetProperties.propText.createProperty(widget, prop_val); field.setValue(PropertyPanelSection. bindSimplePropertyField(undo, bindings, string_prop, new ArrayList<Widget>())); } @Override boolean isWidgetProperty() { return false; } @Override public String getPropVal() { internal_prop_val = string_prop.getValue(); return internal_prop_val; } @Override public ExprInfoString toExprInfo() { return new ExprInfoString(boolExp.get(), getPropVal()); } }; public static class ExprItemValue<T> extends ExprItem< WidgetProperty<T> > { protected final WidgetProperty<T> internal_prop_val; public ExprItemValue(String bool_exp, WidgetProperty<T> prop_val, UndoableActionManager undo) { super(bool_exp, prop_val, undo); internal_prop_val = prop_val; field.setValue(PropertyPanelSection. 
bindSimplePropertyField(undo, bindings, prop_val, new ArrayList<Widget>())); } @Override public ExprInfoValue<T> toExprInfo() { return new ExprInfoValue<T>(boolExp.get(), internal_prop_val); } @Override boolean isWidgetProperty() { return true; } @Override public WidgetProperty<T> getPropVal() { return internal_prop_val; } }; public static class ExprItemFactory { public static <T> ExprItem<?> InfoToItem( final ExpressionInfo<T> info, final UndoableActionManager undo) throws Exception { if (info.getPropVal() instanceof String) return new ExprItemString(info.getBoolExp(), (String)info.getPropVal(), undo); if (info.getPropVal() instanceof WidgetProperty<?>) return new ExprItemValue<>(info.getBoolExp(), (WidgetProperty<?>)info.getPropVal(), undo); logger.log(Level.WARNING, "Tried to make new Expression from info with property not of type String or WidgetProperty: " + info.getPropVal().getClass().getName()); throw new Exception("Invalid info property type"); } public static <T> ExprItem<?> makeNew( final T property, final UndoableActionManager undo) throws Exception { if (property instanceof String) return new ExprItemString("new expr", (String)property, undo); if (property instanceof WidgetProperty<?>) return new ExprItemValue<>("new exp", (WidgetProperty<?>)property, undo); logger.log(Level.WARNING, "Tried to make new Expression from property not of type String or WidgetProperty: " + property.getClass().getName()); throw new Exception("Invalid property type"); } public static <T> ExprItem<?> makeNewFromOld( final T property, final ExprItem<?> old_exp, final UndoableActionManager undo) throws Exception { if (property instanceof String) return new ExprItemString(old_exp.boolExpProperty().get(), (String)property, undo); if (property instanceof WidgetProperty<?>) return new ExprItemValue<>(old_exp.boolExpProperty().get(), (WidgetProperty<?>)property, undo); logger.log(Level.WARNING,"Tried to make new Expression from property not of type String or WidgetProperty: " + 
property.getClass().getName()); throw new Exception("Invalid property type"); } } /** Modifiable RuleInfo */ public static class RuleItem { public List<ExprItem<?>> expressions; public List<PVTableItem> pvs; protected StringProperty name = new SimpleStringProperty(); protected StringProperty prop_id = new SimpleStringProperty(); public BooleanProperty prop_as_expr = new SimpleBooleanProperty(false); protected Widget attached_widget = null; public RuleItem(final Widget attached_widget, final String prop_id) { this(attached_widget, new ArrayList<>(), new ArrayList<>(), Messages.RulesDialog_DefaultRuleName, prop_id, false); } public RuleItem(final Widget attached_widget, final List<ExprItem<?>> exprs, final List<PVTableItem> pvs, final String name, final String prop_id, final boolean prop_as_exp) { this.attached_widget = attached_widget; this.expressions = exprs; this.pvs = pvs; this.name.set(name); this.prop_id.set(prop_id); this.prop_as_expr.set(prop_as_exp); } public static RuleItem forInfo(final Widget attached_widget, final RuleInfo info, final UndoableActionManager undo) { final List<PVTableItem> pvs = new ArrayList<>(); info.getPVs().forEach(pv -> pvs.add(PVTableItem.forPV(pv))); final List<ExprItem<?>> exprs = new ArrayList<>(); info.getExpressions().forEach(expr -> { try { exprs.add(ExprItemFactory.InfoToItem(expr, undo)); } catch (Exception ex) { logger.log(Level.WARNING, "Error converting " + expr, ex); } }); return new RuleItem(attached_widget, exprs, pvs, info.getName(), info.getPropID(), info.getPropAsExprFlag()); } public RuleInfo getRuleInfo() { final List<ScriptPV> spvs = new ArrayList<>(); pvs.forEach(pv -> spvs.add(pv.toScriptPV())); final List<ExpressionInfo<?>> exps = new ArrayList<>(); expressions.forEach(exp -> exps.add(exp.toExprInfo())); return new RuleInfo(name.get(), prop_id.get(), prop_as_expr.get(), exps, spvs); } public StringProperty nameProperty() { return name; } public StringProperty propIDProperty() { return prop_id; } public static 
ExprItem<?> addNewExpr( final UndoableActionManager undo, final ExprItem<?> old_exp, final Widget attached_widget, List<ExprItem<?>> expls, final String prop_id, final boolean prop_as_expr) { final Object new_prop; if (prop_as_expr) new_prop = prop_id + " value"; else new_prop = RulesWidgetProperty.propIDToNewProp(attached_widget, prop_id, ""); ExprItem<?> new_exp = null; try { if (old_exp != null) new_exp = ExprItemFactory.makeNewFromOld(new_prop, old_exp, undo); else new_exp = ExprItemFactory.makeNew(new_prop, undo); expls.add(new_exp); } catch (Exception ex) { logger.log(Level.WARNING, "Rule expression error", ex); } return new_exp; } public ExprItem<?> addNewExpr(final UndoableActionManager undo) { return addNewExpr(undo, null, attached_widget, expressions, prop_id.get(), prop_as_expr.get()); } public boolean tryTogglePropAsExpr(final UndoableActionManager undo, boolean new_val) { if (prop_as_expr.get() == new_val) return false; List<ExprItem<?>> new_expr = new ArrayList<>(); expressions.forEach(expr -> addNewExpr(undo, expr, attached_widget, new_expr, prop_id.get(), new_val)); prop_as_expr.set(new_val); expressions = new_expr; return true; } public boolean tryUpdatePropID(final UndoableActionManager undo, String new_prop_id) { if (new_prop_id.equals(prop_id.get())) return false; prop_id.set(new_prop_id); // If just an output expression string. 
No need to change objects if (prop_as_expr.get()) return true; final List<ExprItem<?>> new_exps = new ArrayList<>(); for (final ExprItem<?> exp : expressions) { WidgetProperty<?> new_prop = RulesWidgetProperty.propIDToNewProp(attached_widget, prop_id.get(), ""); try { new_exps.add(ExprItemFactory.makeNewFromOld(new_prop, exp, undo)); } catch (Exception ex) { logger.log(Level.WARNING, "Rule error", ex); } } expressions = new_exps; return true; } }; /** Data that is linked to the rules_table */ private final ObservableList<RuleItem> rule_items = FXCollections.observableArrayList(); /** Table for all rules */ private TableView<RuleItem> rules_table; /** Data that is linked to the pvs_table */ private final ObservableList<PVTableItem> pv_items = FXCollections.observableArrayList(); /** Table for PVs of currently selected rule */ private TableView<PVTableItem> pvs_table; /** Data that is linked to the expressions_table */ private final ObservableList<ExprItem<?>> expression_items = FXCollections.observableArrayList(); /** Table for PVs of currently selected rule */ private TableView<ExprItem<?>> expressions_table; /** Buttons for removing or reordering rules **/ private Button btn_add_rule, btn_dup_rule, btn_remove_rule, btn_move_rule_up, btn_move_rule_down, btn_show_script; /** Buttons for adding/removing PVs and expressions from the selected rule **/ private Button btn_add_pv, btn_rm_pv, btn_move_pv_up, btn_move_pv_down, btn_add_exp, btn_rm_exp, btn_move_exp_up, btn_move_exp_down; /** Currently selected rule **/ private RuleItem selected_rule_item = null; /** Widget name and type for the header bar **/ private final Widget attached_widget; /** Undo actions for choosing property values in expressions **/ private final UndoableActionManager undo; /** Autocomplete menu for pv names */ private final AutocompleteMenu menu; /** Property options for target of expression **/ private final List<PropInfo> propinfo_ls; private ComboBox<String> propComboBox; private static final 
int MAX_PROP_LENGTH = 40; /** Is the property value an expressions (i.e. user input string) **/ private CheckBox valExpBox; /** The splitter used in the rule side. */ private SplitPane ruleSplitPane; /** turn this rule's property into the long string form used in the combo box **/ public String getPropLongString(RuleItem rule) { final PropInfo pi = new PropInfo(rule.attached_widget, rule.prop_id.get()); return pi.toString(); } /** @param rules Rules to show/edit in the dialog */ public RulesDialog(final UndoableActionManager undo, final List<RuleInfo> rules, final Widget attached_widget, final AutocompleteMenu menu) { this.undo = undo; this.attached_widget = attached_widget; this.menu = menu; this.propinfo_ls = RuleInfo.getTargettableProperties(attached_widget); setTitle(Messages.RulesDialog_Title); setHeaderText(Messages.RulesDialog_Info + ": " + attached_widget.getType() + " " + attached_widget.getName()); final Node node = JFXBaseRepresentation.getJFXNode(attached_widget); initOwner(node.getScene().getWindow()); rules.forEach(rule -> rule_items.add(RuleItem.forInfo(attached_widget, rule, undo))); fixupRules(0); final SplitPane content = createContent(); getDialogPane().setContent(content); getDialogPane().getButtonTypes().addAll(ButtonType.OK, ButtonType.CANCEL); // use same stylesheet as ScriptsDialog, ActionsDialog getDialogPane().getStylesheets().add(ScriptsDialog.class.getResource("opibuilder.css").toExternalForm()); setResizable(true); setResultConverter(button -> { if (button != ButtonType.OK) return null; return rule_items.stream() .filter(item -> ! 
item.name.get().isEmpty()) .map(RuleItem::getRuleInfo) .collect(Collectors.toList()); }); setOnHidden(event -> { final Preferences pref = Preferences.userNodeForPackage(RulesDialog.class); pref.putDouble("content.width", content.getWidth()); pref.putDouble("content.height", content.getHeight()); pref.putDouble("content.divider.position", content.getDividerPositions()[0]); pref.putDouble("rule.content.divider.position", ruleSplitPane.getDividerPositions()[0]); try { pref.flush(); } catch (BackingStoreException ex) { logger.log(Level.WARNING, "Unable to flush preferences", ex); } }); } private SplitPane createContent() { final Node rules = createRulesTable(); final HBox pvs = createPVsTable(); final HBox exprs = createExpressionsTable(); // Display PVs of currently selected rule rules_table.getSelectionModel().selectedItemProperty().addListener( (prop, old, selected) -> { selected_rule_item = selected; if (selected == null) { pvs.setDisable(true); exprs.setDisable(true); btn_remove_rule.setDisable(true); btn_dup_rule.setDisable(true); btn_move_rule_up.setDisable(true); btn_move_rule_down.setDisable(true); btn_show_script.setDisable(true); propComboBox.setDisable(true); propComboBox.getSelectionModel().select(null); valExpBox.setDisable(true); pv_items.clear(); expression_items.clear(); } else { pvs.setDisable(false); exprs.setDisable(false); final TableViewSelectionModel<RuleItem> model = rules_table.getSelectionModel(); btn_remove_rule.setDisable(false); btn_dup_rule.setDisable(false); btn_move_rule_up.setDisable(model.getSelectedIndex() == 0); btn_move_rule_down.setDisable(model.getSelectedIndex() == rule_items.size() - 1); btn_show_script.setDisable(false); propComboBox.setDisable(false); propComboBox.getSelectionModel().select(getPropLongString(selected)); valExpBox.setDisable(false); valExpBox.selectedProperty().set(selected.prop_as_expr.get()); pv_items.setAll(selected.pvs); expression_items.setAll(selected.expressions); fixupPVs(0); } }); // Update PVs of 
selected rule from PVs table final ListChangeListener<PVTableItem> pll = change -> { final RuleItem selected = rules_table.getSelectionModel().getSelectedItem(); if (selected != null) selected.pvs = new ArrayList<>(change.getList()); }; pv_items.addListener(pll); // Update buttons for currently selected PV pvs_table.getSelectionModel().selectedItemProperty().addListener( (prop, old, selected) -> { if (selected == null) { btn_rm_pv.setDisable(true); btn_move_pv_up.setDisable(true); btn_move_pv_down.setDisable(true); } else { final TableViewSelectionModel<PVTableItem> model = pvs_table.getSelectionModel(); btn_rm_pv.setDisable(false); btn_move_pv_up.setDisable(model.getSelectedIndex() == 0); btn_move_pv_down.setDisable(model.getSelectedIndex() == pv_items.size() - 1); } }); // Update Expressions of selected rule from Expressions table final ListChangeListener<ExprItem<?>> ell = change -> { final RuleItem selected = rules_table.getSelectionModel().getSelectedItem(); if (selected != null) selected.expressions = new ArrayList<>(change.getList()); }; expression_items.addListener(ell); // Update buttons for currently selected expression expressions_table.getSelectionModel().selectedItemProperty().addListener( (prop, old, selected) -> { if (selected == null) { btn_rm_exp.setDisable(true); btn_move_exp_up.setDisable(true); btn_move_exp_down.setDisable(true); } else { final TableViewSelectionModel<ExprItem<?>> model = expressions_table.getSelectionModel(); btn_rm_exp.setDisable(false); btn_move_exp_up.setDisable(model.getSelectedIndex() == 0); btn_move_exp_down.setDisable(model.getSelectedIndex() == expression_items.size() - 1); } }); // What is the property id option we are using? final Label propLabel = new Label("Property ID:"); // Show each property with current value final ObservableList<String> prop_id_opts = FXCollections.observableArrayList(); for (PropInfo pi : propinfo_ls) { // Property _value_ can be long, ex. 
points of a polyline // Truncate the value that's shown in the combo box // to prevent combo from using all screen width. String prop_opt = pi.toString(); if (prop_opt.length() > MAX_PROP_LENGTH) prop_opt = prop_opt.substring(0, MAX_PROP_LENGTH) + "..."; prop_id_opts.add(prop_opt); } propComboBox = new ComboBox<String>(prop_id_opts); propComboBox.setDisable(true); propComboBox.getSelectionModel().selectedIndexProperty().addListener( (p, o, index) -> { // Select property info based on index within combo. final int idx = index.intValue(); if (idx >= 0) { final PropInfo prop = propinfo_ls.get(idx); if (selected_rule_item.tryUpdatePropID(undo, prop.getPropID())) expression_items.setAll(selected_rule_item.expressions); } }); propComboBox.setMinHeight(27); propComboBox.setMaxWidth(Double.MAX_VALUE); HBox.setHgrow(propComboBox, Priority.ALWAYS); // TODO: change this to actually manipulate expression objects in the rule valExpBox = new CheckBox("Value as Expression"); valExpBox.setDisable(true); valExpBox.selectedProperty().addListener( (ov, old_val, new_val) -> { if (!selected_rule_item.tryTogglePropAsExpr(undo, new_val)) logger.log(Level.FINE, "Did not update rule property as expression flag to " + new_val); else expression_items.setAll(selected_rule_item.expressions); }); final Region spring = new Region(); HBox.setHgrow(spring, Priority.ALWAYS); final HBox props = new HBox(10, propLabel, propComboBox, spring, valExpBox); props.setAlignment(Pos.CENTER); pvs.setPadding(new Insets(0, 10, 0, 0)); exprs.setPadding(new Insets(0, 0, 0, 10)); HBox.setHgrow(pvs, Priority.ALWAYS); HBox.setHgrow(exprs, Priority.ALWAYS); final Preferences pref = Preferences.userNodeForPackage(RulesDialog.class); final double prefRSPDividerPosition = pref.getDouble("rule.content.divider.position", 0.5); ruleSplitPane = new SplitPane(pvs, exprs); ruleSplitPane.setOrientation(Orientation.HORIZONTAL); ruleSplitPane.setDividerPositions(prefRSPDividerPosition); 
ruleSplitPane.setStyle("-fx-background-insets: 0, 0;"); VBox.setVgrow(ruleSplitPane, Priority.ALWAYS); final VBox subitems = new VBox(10, props, ruleSplitPane); final VBox rulebox = new VBox(10, rules); rulebox.setPadding(new Insets(0, 10, 0, 0)); subitems.setPadding(new Insets(0, 0, 0, 10)); VBox.setVgrow(rules, Priority.ALWAYS); HBox.setHgrow(subitems, Priority.ALWAYS); final double prefWidth = pref.getDouble("content.width", -1); final double prefHeight = pref.getDouble("content.height", -1); final double prefDividerPosition = pref.getDouble("content.divider.position", 0.3); final SplitPane splitPane = new SplitPane(rulebox, subitems); splitPane.setOrientation(Orientation.HORIZONTAL); splitPane.setDividerPositions(prefDividerPosition); if (prefWidth > 0 && prefHeight > 0) splitPane.setPrefSize(prefWidth, prefHeight); // Select the first rule if (!rules_table.getItems().isEmpty()) { Platform.runLater(() -> { rules_table.getSelectionModel().select(0); rules_table.requestFocus(); }); } else Platform.runLater(() -> btn_add_rule.requestFocus()); return splitPane; } /** @return Node for UI elements that edit the rules */ private Node createRulesTable () { // Create table with editable rule 'name' column final TableColumn<RuleItem, String> name_col = new TableColumn<>(Messages.RulesDialog_ColName); name_col.setCellValueFactory(new PropertyValueFactory<RuleItem, String>("name")); name_col.setCellFactory(list -> new TextFieldTableCell<RuleItem, String>(new DefaultStringConverter()) { private final ChangeListener<? 
super Boolean> focusedListener = (ob, o, n) -> { if (!n) cancelEdit(); }; @Override public void cancelEdit() { ( (TextField) getGraphic() ).focusedProperty().removeListener(focusedListener); super.cancelEdit(); } @Override public void commitEdit(final String newValue) { ( (TextField) getGraphic() ).focusedProperty().removeListener(focusedListener); super.commitEdit(newValue); Platform.runLater( ( ) -> btn_add_pv.requestFocus()); } @Override public void startEdit() { super.startEdit(); ( (TextField) getGraphic() ).focusedProperty().addListener(focusedListener); } }); name_col.setOnEditCommit(event -> { final int row = event.getTablePosition().getRow(); rule_items.get(row).name.set(event.getNewValue()); fixupRules(row); }); rules_table = new TableView<>(rule_items); rules_table.getColumns().add(name_col); rules_table.setEditable(true); rules_table.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY); rules_table.setTooltip(new Tooltip(Messages.RulesDialog_RulesTT)); rules_table.setPlaceholder(new Label(Messages.RulesDialog_NoRules)); // Buttons btn_add_rule = new Button(Messages.Add, JFXUtil.getIcon("add.png")); btn_add_rule.setMaxWidth(Double.MAX_VALUE); btn_add_rule.setAlignment(Pos.CENTER_LEFT); btn_add_rule.setOnAction(event -> { final RuleItem newItem = new RuleItem( attached_widget, selected_rule_item == null ? ( ( propinfo_ls.size() == 0 ) ? 
"" : propinfo_ls.get(0).getPropID() )
        : selected_rule_item.prop_id.get() );
    rule_items.add(newItem);
    rules_table.getSelectionModel().select(newItem);
    final int newRow = rules_table.getSelectionModel().getSelectedIndex();
    // Brief delay so the selection settles, then start editing the name on the UI thread
    ModelThreadPool.getTimer().schedule(() ->
    {
        Platform.runLater(() -> rules_table.edit(newRow, name_col));
    }, 123, TimeUnit.MILLISECONDS);
});

btn_remove_rule = new Button(Messages.Remove, JFXUtil.getIcon("delete.png"));
btn_remove_rule.setMaxWidth(Double.MAX_VALUE);
btn_remove_rule.setAlignment(Pos.CENTER_LEFT);
btn_remove_rule.setDisable(true);
btn_remove_rule.setOnAction(event ->
{
    final int sel = rules_table.getSelectionModel().getSelectedIndex();
    if (sel >= 0)
    {
        rule_items.remove(sel);
        fixupRules(sel);
    }
});

btn_move_rule_up = new Button(Messages.MoveUp, JFXUtil.getIcon("up.png"));
btn_move_rule_up.setMaxWidth(Double.MAX_VALUE);
btn_move_rule_up.setAlignment(Pos.CENTER_LEFT);
btn_move_rule_up.setDisable(true);
btn_move_rule_up.setOnAction(event -> TableHelper.move_item_up(rules_table, rule_items));

btn_move_rule_down = new Button(Messages.MoveDown, JFXUtil.getIcon("down.png"));
btn_move_rule_down.setMaxWidth(Double.MAX_VALUE);
btn_move_rule_down.setAlignment(Pos.CENTER_LEFT);
btn_move_rule_down.setDisable(true);
btn_move_rule_down.setOnAction(event -> TableHelper.move_item_down(rules_table, rule_items));

btn_dup_rule = new Button(Messages.Duplicate, JFXUtil.getIcon("file-duplicate.png"));
btn_dup_rule.setMaxWidth(Double.MAX_VALUE);
btn_dup_rule.setAlignment(Pos.CENTER_LEFT);
btn_dup_rule.setDisable(true);
btn_dup_rule.setOnAction(event ->
{
    if (selected_rule_item != null)
    {
        // Deep-copy the selected rule via its RuleInfo, then tag the copy's name
        final RuleItem newItem = RuleItem.forInfo(attached_widget, selected_rule_item.getRuleInfo(), undo);
        if (!newItem.nameProperty().get().endsWith(" (duplicate)"))
            newItem.nameProperty().set(newItem.nameProperty().get() + " (duplicate)");
        rule_items.add(newItem);
        rules_table.getSelectionModel().select(newItem);
        final int newRow =
rules_table.getSelectionModel().getSelectedIndex(); ModelThreadPool.getTimer().schedule(() -> { Platform.runLater( ( ) -> rules_table.edit(newRow, name_col)); }, 123, TimeUnit.MILLISECONDS); } }); btn_show_script = new Button(Messages.RulesDialog_ShowScript, JFXUtil.getIcon("file.png")); btn_show_script.setMaxWidth(Double.MAX_VALUE); btn_show_script.setMinWidth(120); btn_dup_rule.setAlignment(Pos.CENTER_LEFT); btn_show_script.setDisable(true); btn_show_script.setOnAction(event -> { final int sel = rules_table.getSelectionModel().getSelectedIndex(); if (sel >= 0) { final String content = rule_items.get(sel).getRuleInfo().getTextPy(attached_widget); final SyntaxHighlightedMultiLineInputDialog dialog = new SyntaxHighlightedMultiLineInputDialog( btn_show_script, content, Language.Python, false ); DialogHelper.positionDialog(dialog, btn_show_script, -200, -300); dialog.setTextHeight(600); dialog.show(); } }); final VBox buttons = new VBox(10, btn_add_rule, btn_remove_rule, btn_move_rule_up, btn_move_rule_down, new Pane(), btn_dup_rule, btn_show_script); final HBox content = new HBox(10, rules_table, buttons); HBox.setHgrow(rules_table, Priority.ALWAYS); HBox.setHgrow(buttons, Priority.NEVER); return content; } /** Fix rules data: Delete empty rows in middle * @param changed_row Row to check, and remove if it's empty */ private void fixupRules(final int changed_row) { // Check if edited row is now empty and should be deleted if (changed_row < rule_items.size()) { final RuleItem item = rule_items.get(changed_row); if (item.nameProperty().get().trim().isEmpty()) rule_items.remove(changed_row); } } /** @return Node for UI elements that edit the expressions */ private HBox createExpressionsTable () { // Create table with editable rule 'bool expression' column final TableColumn<ExprItem<?>, String> bool_exp_col = new TableColumn<>(Messages.RulesDialog_ColBoolExp); bool_exp_col.setSortable(false); bool_exp_col.setCellValueFactory(new PropertyValueFactory<ExprItem<?>, 
String>("boolExp"));
// Text cell for the boolean expression; cancels the edit when the text field loses focus
bool_exp_col.setCellFactory(tableColumn -> new TextFieldTableCell<ExprItem<?>, String>(new DefaultStringConverter())
{
    private final ChangeListener<? super Boolean> focusedListener = (ob, o, n) ->
    {
        if (!n)
            cancelEdit();
    };

    /* Instance initializer. */
    {
        setAlignment(Pos.CENTER_LEFT);
    }

    @Override
    public void cancelEdit()
    {
        ( (TextField) getGraphic() ).focusedProperty().removeListener(focusedListener);
        super.cancelEdit();
    }

    @Override
    public void commitEdit (final String newValue)
    {
        ( (TextField) getGraphic() ).focusedProperty().removeListener(focusedListener);
        super.commitEdit(newValue);
    }

    @Override
    public void startEdit()
    {
        super.startEdit();
        ( (TextField) getGraphic() ).focusedProperty().addListener(focusedListener);
    }
});

// Create table with editable rule 'value expression' column
final TableColumn<ExprItem<?>, Node> val_exp_col = new TableColumn<>(Messages.RulesDialog_ColValExp);

// This statement requires "val_exp_col" be defined.
bool_exp_col.setOnEditCommit(event ->
{
    final int row = event.getTablePosition().getRow();
    expression_items.get(row).boolExpProperty().set(event.getNewValue());
    // Brief delay, then move focus to the value cell of the same row
    ModelThreadPool.getTimer().schedule(() ->
    {
        Platform.runLater(() -> val_exp_col.getCellData(row).requestFocus());
    }, 123, TimeUnit.MILLISECONDS);
});

val_exp_col.setSortable(false);
val_exp_col.setCellValueFactory(new PropertyValueFactory<ExprItem<?>, Node>("field"));
// The value column renders the pre-built editor Node ('field') of each expression item
val_exp_col.setCellFactory(tableColumn -> new TableCell<ExprItem<?>, Node>()
{
    @Override
    protected void updateItem (final Node item, final boolean empty)
    {
        // calling super here is very important - don't skip this!
super.updateItem(item, empty);
        setGraphic(item);
    }
});
val_exp_col.setOnEditCommit(event ->
{
    final int row = event.getTablePosition().getRow();
    expression_items.get(row).fieldProperty().set(event.getNewValue());
    event.consume();
    // Delay, then move focus to the 'add expression' button
    ModelThreadPool.getTimer().schedule(() ->
    {
        Platform.runLater(() -> btn_add_exp.requestFocus());
    }, 1230, TimeUnit.MILLISECONDS);
});

expressions_table = new TableView<>(expression_items);
expressions_table.getColumns().add(bool_exp_col);
expressions_table.getColumns().add(val_exp_col);
expressions_table.setEditable(true);
expressions_table.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY);
expressions_table.setTooltip(new Tooltip(Messages.RulesDialog_ExpressionsTT));
expressions_table.setPlaceholder(new Label(Messages.RulesDialog_NoExpressions));

// Buttons
btn_add_exp = new Button(Messages.Add, JFXUtil.getIcon("add.png"));
btn_add_exp.setMaxWidth(Double.MAX_VALUE);
btn_add_exp.setAlignment(Pos.CENTER_LEFT);
btn_add_exp.setOnAction(event ->
{
    // Append a new expression to the selected rule, select it, then start editing it
    selected_rule_item.addNewExpr(undo);
    expression_items.setAll(selected_rule_item.expressions);
    expressions_table.getSelectionModel().select(expression_items.size() - 1);
    final int newRow = expression_items.size() - 1;
    ModelThreadPool.getTimer().schedule(() ->
    {
        Platform.runLater(() -> expressions_table.edit(newRow, bool_exp_col));
    }, 123, TimeUnit.MILLISECONDS);
});

btn_rm_exp = new Button(Messages.Remove, JFXUtil.getIcon("delete.png"));
btn_rm_exp.setMaxWidth(Double.MAX_VALUE);
btn_rm_exp.setMinWidth(96);
btn_rm_exp.setAlignment(Pos.CENTER_LEFT);
btn_rm_exp.setDisable(true);
btn_rm_exp.setOnAction(event ->
{
    final int sel = expressions_table.getSelectionModel().getSelectedIndex();
    if (sel >= 0)
        expression_items.remove(sel);
});

btn_move_exp_up = new Button(Messages.MoveUp, JFXUtil.getIcon("up.png"));
btn_move_exp_up.setMaxWidth(Double.MAX_VALUE);
btn_move_exp_up.setAlignment(Pos.CENTER_LEFT);
btn_move_exp_up.setDisable(true);
btn_move_exp_up.setOnAction(event ->
TableHelper.move_item_up(expressions_table, expression_items));

btn_move_exp_down = new Button(Messages.MoveDown, JFXUtil.getIcon("down.png"));
btn_move_exp_down.setMaxWidth(Double.MAX_VALUE);
btn_move_exp_down.setAlignment(Pos.CENTER_LEFT);
btn_move_exp_down.setDisable(true);
btn_move_exp_down.setOnAction(event -> TableHelper.move_item_down(expressions_table, expression_items));

final VBox buttons = new VBox(10, btn_add_exp, btn_rm_exp, btn_move_exp_up, btn_move_exp_down);
final HBox content = new HBox(10, expressions_table, buttons);
HBox.setHgrow(expressions_table, Priority.ALWAYS);
HBox.setHgrow(buttons, Priority.NEVER);
// Disabled until a rule is selected
content.setDisable(true);
return content;
}

/** @return Node for UI elements that edit the PVs of a rule */
private HBox createPVsTable()
{
    // Read-only row-number column
    final TableColumn<PVTableItem, Integer> indexColumn = new TableColumn<>("#");
    indexColumn.setEditable(false);
    indexColumn.setSortable(false);
    indexColumn.setCellFactory(new LineNumberTableCellFactory<>(true));
    indexColumn.setMaxWidth(26);
    indexColumn.setMinWidth(26);

    // Create table with editable 'name' column
    final TableColumn<PVTableItem, String> name_col = new TableColumn<>(Messages.ScriptsDialog_ColPV);
    name_col.setSortable(false);
    name_col.setCellValueFactory(new PropertyValueFactory<PVTableItem, String>("name"));
    // PV name cell with autocompletion from the shared menu
    name_col.setCellFactory(col -> new AutoCompletedTableCell(menu, btn_add_pv));
    name_col.setOnEditCommit(event ->
    {
        final int row = event.getTablePosition().getRow();
        pv_items.get(row).nameProperty().set(event.getNewValue());
        fixupPVs(row);
    });

    // Table column for 'trigger' uses CheckBoxTableCell that directly
    // modifies the Observable Property
    final TableColumn<PVTableItem, Boolean> trigger_col = new TableColumn<>(Messages.ScriptsDialog_ColTrigger);
    trigger_col.setSortable(false);
    trigger_col.setCellValueFactory(new PropertyValueFactory<PVTableItem, Boolean>("trigger"));
    trigger_col.setCellFactory(CheckBoxTableCell.<PVTableItem> forTableColumn(trigger_col));
    trigger_col.setResizable(false);
trigger_col.setMaxWidth(70);
trigger_col.setMinWidth(70);

pvs_table = new TableView<>(pv_items);
pvs_table.getColumns().add(indexColumn);
pvs_table.getColumns().add(name_col);
pvs_table.getColumns().add(trigger_col);
pvs_table.setEditable(true);
pvs_table.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY);
pvs_table.setTooltip(new Tooltip(Messages.RulesDialog_PVsTT));
pvs_table.setPlaceholder(new Label(Messages.RulesDialog_NoPVs));

// Buttons
btn_add_pv = new Button(Messages.Add, JFXUtil.getIcon("add.png"));
btn_add_pv.setMaxWidth(Double.MAX_VALUE);
btn_add_pv.setAlignment(Pos.CENTER_LEFT);
btn_add_pv.setOnAction(event ->
{
    // Add a placeholder PV, select it, then start editing its name on the UI thread
    final PVTableItem newItem = new PVTableItem("new-PV", true);
    pv_items.add(newItem);
    pvs_table.getSelectionModel().select(newItem);
    final int newRow = pvs_table.getSelectionModel().getSelectedIndex();
    ModelThreadPool.getTimer().schedule(() ->
    {
        Platform.runLater(() -> pvs_table.edit(newRow, name_col));
    }, 123, TimeUnit.MILLISECONDS);
});

btn_rm_pv = new Button(Messages.Remove, JFXUtil.getIcon("delete.png"));
btn_rm_pv.setMaxWidth(Double.MAX_VALUE);
btn_rm_pv.setMinWidth(96);
btn_rm_pv.setAlignment(Pos.CENTER_LEFT);
btn_rm_pv.setDisable(true);
btn_rm_pv.setOnAction(event ->
{
    final int sel = pvs_table.getSelectionModel().getSelectedIndex();
    if (sel >= 0)
    {
        pv_items.remove(sel);
        fixupPVs(sel);
    }
});

btn_move_pv_up = new Button(Messages.MoveUp, JFXUtil.getIcon("up.png"));
btn_move_pv_up.setMaxWidth(Double.MAX_VALUE);
btn_move_pv_up.setAlignment(Pos.CENTER_LEFT);
btn_move_pv_up.setDisable(true);
btn_move_pv_up.setOnAction(event -> TableHelper.move_item_up(pvs_table, pv_items));

btn_move_pv_down = new Button(Messages.MoveDown, JFXUtil.getIcon("down.png"));
btn_move_pv_down.setMaxWidth(Double.MAX_VALUE);
btn_move_pv_down.setAlignment(Pos.CENTER_LEFT);
btn_move_pv_down.setDisable(true);
btn_move_pv_down.setOnAction(event -> TableHelper.move_item_down(pvs_table, pv_items));

final VBox buttons = new VBox(10, btn_add_pv, btn_rm_pv, btn_move_pv_up,
btn_move_pv_down);
final HBox content = new HBox(10, pvs_table, buttons);
HBox.setHgrow(pvs_table, Priority.ALWAYS);
HBox.setHgrow(buttons, Priority.NEVER);
// Disabled until a rule is selected
content.setDisable(true);
return content;
}

/** Fix PVs data: Delete empty rows in middle
 *  @param changed_row Row to check, and remove if it's empty
 */
private void fixupPVs(final int changed_row)
{
    // Check if edited row is now empty and should be deleted
    if (changed_row < pv_items.size())
    {
        final PVTableItem item = pv_items.get(changed_row);
        if (item.nameProperty().get().trim().isEmpty())
            pv_items.remove(changed_row);
    }
}
}
org.csstudio.display.builder.editor/src/org/csstudio/display/builder/editor/properties/RulesDialog.java
/******************************************************************************* * Copyright (c) 2015-2018 Oak Ridge National Laboratory. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html *******************************************************************************/ package org.csstudio.display.builder.editor.properties; import static org.csstudio.display.builder.editor.Plugin.logger; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.prefs.BackingStoreException; import java.util.prefs.Preferences; import java.util.stream.Collectors; import org.csstudio.display.builder.model.Widget; import org.csstudio.display.builder.model.WidgetProperty; import org.csstudio.display.builder.model.properties.CommonWidgetProperties; import org.csstudio.display.builder.model.properties.RulesWidgetProperty; import org.csstudio.display.builder.model.properties.ScriptPV; import org.csstudio.display.builder.model.rules.RuleInfo; import org.csstudio.display.builder.model.rules.RuleInfo.ExprInfoString; import org.csstudio.display.builder.model.rules.RuleInfo.ExprInfoValue; import org.csstudio.display.builder.model.rules.RuleInfo.ExpressionInfo; import org.csstudio.display.builder.model.rules.RuleInfo.PropInfo; import org.csstudio.display.builder.model.util.ModelThreadPool; import org.csstudio.display.builder.representation.javafx.AutocompleteMenu; import org.csstudio.display.builder.representation.javafx.JFXUtil; import org.csstudio.display.builder.representation.javafx.Messages; import org.csstudio.display.builder.representation.javafx.PVTableItem; import org.csstudio.display.builder.representation.javafx.PVTableItem.AutoCompletedTableCell; import org.csstudio.display.builder.representation.javafx.ScriptsDialog; 
import org.csstudio.display.builder.representation.javafx.widgets.JFXBaseRepresentation; import org.csstudio.display.builder.util.undo.UndoableActionManager; import org.csstudio.javafx.DialogHelper; import org.csstudio.javafx.LineNumberTableCellFactory; import org.csstudio.javafx.SyntaxHighlightedMultiLineInputDialog; import org.csstudio.javafx.SyntaxHighlightedMultiLineInputDialog.Language; import org.csstudio.javafx.TableHelper; import javafx.application.Platform; import javafx.beans.property.BooleanProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.beans.property.SimpleObjectProperty; import javafx.beans.property.SimpleStringProperty; import javafx.beans.property.StringProperty; import javafx.beans.value.ChangeListener; import javafx.collections.FXCollections; import javafx.collections.ListChangeListener; import javafx.collections.ObservableList; import javafx.geometry.Insets; import javafx.geometry.Orientation; import javafx.geometry.Pos; import javafx.scene.Node; import javafx.scene.control.Button; import javafx.scene.control.ButtonType; import javafx.scene.control.CheckBox; import javafx.scene.control.ComboBox; import javafx.scene.control.Dialog; import javafx.scene.control.Label; import javafx.scene.control.SplitPane; import javafx.scene.control.TableCell; import javafx.scene.control.TableColumn; import javafx.scene.control.TableView; import javafx.scene.control.TableView.TableViewSelectionModel; import javafx.scene.control.TextField; import javafx.scene.control.Tooltip; import javafx.scene.control.cell.CheckBoxTableCell; import javafx.scene.control.cell.PropertyValueFactory; import javafx.scene.control.cell.TextFieldTableCell; import javafx.scene.layout.HBox; import javafx.scene.layout.Pane; import javafx.scene.layout.Priority; import javafx.scene.layout.Region; import javafx.scene.layout.VBox; import javafx.util.converter.DefaultStringConverter; /** Dialog for editing {@link RuleInfo}s * @author Megan Grodowitz */ 
@SuppressWarnings("nls")
public class RulesDialog extends Dialog<List<RuleInfo>>
{
    /** Expression info as property-based item for table */
    public abstract static class ExprItem<T>
    {
        // Boolean (condition) expression text
        final protected StringProperty boolExp = new SimpleStringProperty();
        // Editor node for the value side of the expression
        final protected SimpleObjectProperty<Node> field = new SimpleObjectProperty<Node>();
        final protected List<WidgetPropertyBinding<?,?>> bindings = new ArrayList<>();

        public ExprItem(final String boolE, final T valE, final UndoableActionManager undo)
        {
            this.boolExp.set(boolE);
        }

        public SimpleObjectProperty<Node> fieldProperty()
        {
            return field;
        }

        public StringProperty boolExpProperty()
        {
            return boolExp;
        }

        abstract boolean isWidgetProperty();
        abstract public ExpressionInfo<T> toExprInfo();
        abstract public T getPropVal();
    };

    /** Expression item whose value is a plain string */
    public static class ExprItemString extends ExprItem<String>
    {
        final protected Widget widget = new Widget("ExprItemString");
        final protected WidgetProperty<String> string_prop;
        protected String internal_prop_val;

        public ExprItemString(String bool_exp, String prop_val, UndoableActionManager undo)
        {
            super(bool_exp, prop_val, undo);
            internal_prop_val = prop_val;
            string_prop = CommonWidgetProperties.propText.createProperty(widget, prop_val);
            field.setValue(PropertyPanelSection.
                bindSimplePropertyField(undo, bindings, string_prop, new ArrayList<Widget>()));
        }

        @Override
        boolean isWidgetProperty()
        {
            return false;
        }

        @Override
        public String getPropVal()
        {
            // Refresh cached value from the editor-backed property before returning
            internal_prop_val = string_prop.getValue();
            return internal_prop_val;
        }

        @Override
        public ExprInfoString toExprInfo()
        {
            return new ExprInfoString(boolExp.get(), getPropVal());
        }
    };

    /** Expression item whose value is a widget property */
    public static class ExprItemValue<T> extends ExprItem< WidgetProperty<T> >
    {
        protected final WidgetProperty<T> internal_prop_val;

        public ExprItemValue(String bool_exp, WidgetProperty<T> prop_val, UndoableActionManager undo)
        {
            super(bool_exp, prop_val, undo);
            internal_prop_val = prop_val;
            field.setValue(PropertyPanelSection.
bindSimplePropertyField(undo, bindings, prop_val, new ArrayList<Widget>()));
        }

        @Override
        public ExprInfoValue<T> toExprInfo()
        {
            return new ExprInfoValue<T>(boolExp.get(), internal_prop_val);
        }

        @Override
        boolean isWidgetProperty()
        {
            return true;
        }

        @Override
        public WidgetProperty<T> getPropVal()
        {
            return internal_prop_val;
        }
    };

    /** Creates ExprItem instances for String or WidgetProperty values */
    public static class ExprItemFactory
    {
        /** Convert a persisted ExpressionInfo into an editable table item.
         *  @throws Exception if the property is neither String nor WidgetProperty
         */
        public static <T> ExprItem<?> InfoToItem(
            final ExpressionInfo<T> info,
            final UndoableActionManager undo) throws Exception
        {
            if (info.getPropVal() instanceof String)
                return new ExprItemString(info.getBoolExp(), (String)info.getPropVal(), undo);
            if (info.getPropVal() instanceof WidgetProperty<?>)
                return new ExprItemValue<>(info.getBoolExp(), (WidgetProperty<?>)info.getPropVal(), undo);
            logger.log(Level.WARNING,
                "Tried to make new Expression from info with property not of type String or WidgetProperty: "
                + info.getPropVal().getClass().getName());
            throw new Exception("Invalid info property type");
        }

        /** Create a fresh item with default expression text.
         *  NOTE(review): default texts differ ("new expr" vs. "new exp") —
         *  runtime strings, left untouched here.
         */
        public static <T> ExprItem<?> makeNew(
            final T property,
            final UndoableActionManager undo) throws Exception
        {
            if (property instanceof String)
                return new ExprItemString("new expr", (String)property, undo);
            if (property instanceof WidgetProperty<?>)
                return new ExprItemValue<>("new exp", (WidgetProperty<?>)property, undo);
            logger.log(Level.WARNING,
                "Tried to make new Expression from property not of type String or WidgetProperty: "
                + property.getClass().getName());
            throw new Exception("Invalid property type");
        }

        /** Create an item that keeps the boolean expression of an existing one */
        public static <T> ExprItem<?> makeNewFromOld(
            final T property,
            final ExprItem<?> old_exp,
            final UndoableActionManager undo) throws Exception
        {
            if (property instanceof String)
                return new ExprItemString(old_exp.boolExpProperty().get(), (String)property, undo);
            if (property instanceof WidgetProperty<?>)
                return new ExprItemValue<>(old_exp.boolExpProperty().get(), (WidgetProperty<?>)property, undo);
            logger.log(Level.WARNING,"Tried to make new Expression from property not of type String or WidgetProperty: " +
property.getClass().getName());
            throw new Exception("Invalid property type");
        }
    }

    /** Modifiable RuleInfo */
    public static class RuleItem
    {
        public List<ExprItem<?>> expressions;
        public List<PVTableItem> pvs;
        protected StringProperty name = new SimpleStringProperty();
        protected StringProperty prop_id = new SimpleStringProperty();
        // true: value expressions are free-form strings; false: widget-property values
        public BooleanProperty prop_as_expr = new SimpleBooleanProperty(false);
        protected Widget attached_widget = null;

        public RuleItem(final Widget attached_widget, final String prop_id)
        {
            this(attached_widget, new ArrayList<>(), new ArrayList<>(),
                 Messages.RulesDialog_DefaultRuleName, prop_id, false);
        }

        public RuleItem(final Widget attached_widget,
                        final List<ExprItem<?>> exprs,
                        final List<PVTableItem> pvs,
                        final String name,
                        final String prop_id,
                        final boolean prop_as_exp)
        {
            this.attached_widget = attached_widget;
            this.expressions = exprs;
            this.pvs = pvs;
            this.name.set(name);
            this.prop_id.set(prop_id);
            this.prop_as_expr.set(prop_as_exp);
        }

        /** Convert a persisted RuleInfo into an editable item */
        public static RuleItem forInfo(final Widget attached_widget, final RuleInfo info, final UndoableActionManager undo)
        {
            final List<PVTableItem> pvs = new ArrayList<>();
            info.getPVs().forEach(pv -> pvs.add(PVTableItem.forPV(pv)));
            final List<ExprItem<?>> exprs = new ArrayList<>();
            info.getExpressions().forEach(expr ->
            {
                try
                {
                    exprs.add(ExprItemFactory.InfoToItem(expr, undo));
                }
                catch (Exception ex)
                {
                    // Skip unconvertible expressions, keep the rest of the rule
                    logger.log(Level.WARNING, "Error converting " + expr, ex);
                }
            });
            return new RuleItem(attached_widget, exprs, pvs, info.getName(), info.getPropID(), info.getPropAsExprFlag());
        }

        /** @return Immutable RuleInfo for this item's current state */
        public RuleInfo getRuleInfo()
        {
            final List<ScriptPV> spvs = new ArrayList<>();
            pvs.forEach(pv -> spvs.add(pv.toScriptPV()));
            final List<ExpressionInfo<?>> exps = new ArrayList<>();
            expressions.forEach(exp -> exps.add(exp.toExprInfo()));
            return new RuleInfo(name.get(), prop_id.get(), prop_as_expr.get(), exps, spvs);
        }

        public StringProperty nameProperty()
        {
            return name;
        }

        public StringProperty propIDProperty()
        {
            return prop_id;
        }

        public static
ExprItem<?> addNewExpr(
            final UndoableActionManager undo,
            final ExprItem<?> old_exp,
            final Widget attached_widget,
            List<ExprItem<?>> expls,
            final String prop_id,
            final boolean prop_as_expr)
        {
            // New value is either a free-form string or a fresh widget-property instance
            final Object new_prop;
            if (prop_as_expr)
                new_prop = prop_id + " value";
            else
                new_prop = RulesWidgetProperty.propIDToNewProp(attached_widget, prop_id, "");
            ExprItem<?> new_exp = null;
            try
            {
                // Preserve the old boolean expression when replacing an existing item
                if (old_exp != null)
                    new_exp = ExprItemFactory.makeNewFromOld(new_prop, old_exp, undo);
                else
                    new_exp = ExprItemFactory.makeNew(new_prop, undo);
                expls.add(new_exp);
            }
            catch (Exception ex)
            {
                logger.log(Level.WARNING, "Rule expression error", ex);
            }
            // May be null if creation failed
            return new_exp;
        }

        public ExprItem<?> addNewExpr(final UndoableActionManager undo)
        {
            return addNewExpr(undo, null, attached_widget, expressions, prop_id.get(), prop_as_expr.get());
        }

        /** Switch between string-expression and property-value mode, rebuilding expressions.
         *  @return true if the mode actually changed
         */
        public boolean tryTogglePropAsExpr(final UndoableActionManager undo, boolean new_val)
        {
            if (prop_as_expr.get() == new_val)
                return false;
            List<ExprItem<?>> new_expr = new ArrayList<>();
            expressions.forEach(expr -> addNewExpr(undo, expr, attached_widget, new_expr, prop_id.get(), new_val));
            prop_as_expr.set(new_val);
            expressions = new_expr;
            return true;
        }

        /** Re-target the rule at a different property, rebuilding expressions.
         *  @return true if the property ID actually changed
         */
        public boolean tryUpdatePropID(final UndoableActionManager undo, String new_prop_id)
        {
            if (new_prop_id.equals(prop_id.get()))
                return false;
            prop_id.set(new_prop_id);
            // If just an output expression string.
// No need to change objects
            if (prop_as_expr.get())
                return true;
            final List<ExprItem<?>> new_exps = new ArrayList<>();
            for (final ExprItem<?> exp : expressions)
            {
                // Re-create each value as an instance of the new target property
                WidgetProperty<?> new_prop = RulesWidgetProperty.propIDToNewProp(attached_widget, prop_id.get(), "");
                try
                {
                    new_exps.add(ExprItemFactory.makeNewFromOld(new_prop, exp, undo));
                }
                catch (Exception ex)
                {
                    logger.log(Level.WARNING, "Rule error", ex);
                }
            }
            expressions = new_exps;
            return true;
        }
    };

    /** Data that is linked to the rules_table */
    private final ObservableList<RuleItem> rule_items = FXCollections.observableArrayList();

    /** Table for all rules */
    private TableView<RuleItem> rules_table;

    /** Data that is linked to the pvs_table */
    private final ObservableList<PVTableItem> pv_items = FXCollections.observableArrayList();

    /** Table for PVs of currently selected rule */
    private TableView<PVTableItem> pvs_table;

    /** Data that is linked to the expressions_table */
    private final ObservableList<ExprItem<?>> expression_items = FXCollections.observableArrayList();

    /** Table for expressions of currently selected rule */
    private TableView<ExprItem<?>> expressions_table;

    /** Buttons for removing or reordering rules **/
    private Button btn_add_rule, btn_dup_rule, btn_remove_rule, btn_move_rule_up, btn_move_rule_down, btn_show_script;

    /** Buttons for adding/removing PVs and expressions from the selected rule **/
    private Button btn_add_pv, btn_rm_pv, btn_move_pv_up, btn_move_pv_down, btn_add_exp, btn_rm_exp, btn_move_exp_up, btn_move_exp_down;

    /** Currently selected rule **/
    private RuleItem selected_rule_item = null;

    /** Widget name and type for the header bar **/
    private final Widget attached_widget;

    /** Undo actions for choosing property values in expressions **/
    private final UndoableActionManager undo;

    /** Autocomplete menu for pv names */
    private final AutocompleteMenu menu;

    /** Property options for target of expression **/
    private final List<PropInfo> propinfo_ls;

    /** Selector for the rule's target property */
    private ComboBox<String> propComboBox;

    private static final
int MAX_PROP_LENGTH = 40; /** Is the property value an expressions (i.e. user input string) **/ private CheckBox valExpBox; /** The splitter used in the rule side. */ private SplitPane ruleSplitPane; /** turn this rule's property into the long string form used in the combo box **/ public String getPropLongString(RuleItem rule) { final PropInfo pi = new PropInfo(rule.attached_widget, rule.prop_id.get()); return pi.toString(); } /** @param rules Rules to show/edit in the dialog */ public RulesDialog(final UndoableActionManager undo, final List<RuleInfo> rules, final Widget attached_widget, final AutocompleteMenu menu) { this.undo = undo; this.attached_widget = attached_widget; this.menu = menu; this.propinfo_ls = RuleInfo.getTargettableProperties(attached_widget); setTitle(Messages.RulesDialog_Title); setHeaderText(Messages.RulesDialog_Info + ": " + attached_widget.getType() + " " + attached_widget.getName()); final Node node = JFXBaseRepresentation.getJFXNode(attached_widget); initOwner(node.getScene().getWindow()); rules.forEach(rule -> rule_items.add(RuleItem.forInfo(attached_widget, rule, undo))); fixupRules(0); final SplitPane content = createContent(); getDialogPane().setContent(content); getDialogPane().getButtonTypes().addAll(ButtonType.OK, ButtonType.CANCEL); // use same stylesheet as ScriptsDialog, ActionsDialog getDialogPane().getStylesheets().add(ScriptsDialog.class.getResource("opibuilder.css").toExternalForm()); setResizable(true); setResultConverter(button -> { if (button != ButtonType.OK) return null; return rule_items.stream() .filter(item -> ! 
item.name.get().isEmpty()) .map(RuleItem::getRuleInfo) .collect(Collectors.toList()); }); setOnHidden(event -> { final Preferences pref = Preferences.userNodeForPackage(RulesDialog.class); pref.putDouble("content.width", content.getWidth()); pref.putDouble("content.height", content.getHeight()); pref.putDouble("content.divider.position", content.getDividerPositions()[0]); pref.putDouble("rule.content.divider.position", ruleSplitPane.getDividerPositions()[0]); try { pref.flush(); } catch (BackingStoreException ex) { logger.log(Level.WARNING, "Unable to flush preferences", ex); } }); } private SplitPane createContent() { final Node rules = createRulesTable(); final HBox pvs = createPVsTable(); final HBox exprs = createExpressionsTable(); // Display PVs of currently selected rule rules_table.getSelectionModel().selectedItemProperty().addListener( ( prop, old, selected ) -> { selected_rule_item = selected; if ( selected == null ) { pvs.setDisable(true); exprs.setDisable(true); btn_remove_rule.setDisable(true); btn_dup_rule.setDisable(true); btn_move_rule_up.setDisable(true); btn_move_rule_down.setDisable(true); btn_show_script.setDisable(true); propComboBox.setDisable(true); propComboBox.getSelectionModel().select(null); valExpBox.setDisable(true); pv_items.clear(); expression_items.clear(); } else { pvs.setDisable(false); exprs.setDisable(false); TableViewSelectionModel<RuleItem> model = rules_table.getSelectionModel(); btn_remove_rule.setDisable(false); btn_dup_rule.setDisable(false); btn_move_rule_up.setDisable(model.getSelectedIndex() == 0); btn_move_rule_down.setDisable(model.getSelectedIndex() == rule_items.size() - 1); btn_show_script.setDisable(false); propComboBox.setDisable(false); propComboBox.getSelectionModel().select(getPropLongString(selected)); valExpBox.setDisable(false); valExpBox.selectedProperty().set(selected.prop_as_expr.get()); pv_items.setAll(selected.pvs); expression_items.setAll(selected.expressions); fixupPVs(0); } }); // Update PVs of 
selected rule from PVs table final ListChangeListener<PVTableItem> pll = change -> { final RuleItem selected = rules_table.getSelectionModel().getSelectedItem(); if ( selected != null ) { selected.pvs = new ArrayList<>(change.getList()); } }; pv_items.addListener(pll); // Update buttons for currently selected PV pvs_table.getSelectionModel().selectedItemProperty().addListener( ( prop, old, selected ) -> { if ( selected == null ) { btn_rm_pv.setDisable(true); btn_move_pv_up.setDisable(true); btn_move_pv_down.setDisable(true); } else { TableViewSelectionModel<PVTableItem> model = pvs_table.getSelectionModel(); btn_rm_pv.setDisable(false); btn_move_pv_up.setDisable(model.getSelectedIndex() == 0); btn_move_pv_down.setDisable(model.getSelectedIndex() == pv_items.size() - 1); } }); // Update Expressions of selected rule from Expressions table final ListChangeListener<ExprItem<?>> ell = change -> { final RuleItem selected = rules_table.getSelectionModel().getSelectedItem(); if (selected != null) { selected.expressions = new ArrayList<>(change.getList()); } }; expression_items.addListener(ell); // Update buttons for currently selected expression expressions_table.getSelectionModel().selectedItemProperty().addListener( ( prop, old, selected ) -> { if ( selected == null ) { btn_rm_exp.setDisable(true); btn_move_exp_up.setDisable(true); btn_move_exp_down.setDisable(true); } else { TableViewSelectionModel<ExprItem<?>> model = expressions_table.getSelectionModel(); btn_rm_exp.setDisable(false); btn_move_exp_up.setDisable(model.getSelectedIndex() == 0); btn_move_exp_down.setDisable(model.getSelectedIndex() == expression_items.size() - 1); } }); // What is the property id option we are using? final Label propLabel = new Label("Property ID:"); // Show each property with current value final ObservableList<String> prop_id_opts = FXCollections.observableArrayList(); for (PropInfo pi : propinfo_ls) { // Property _value_ can be long, ex. 
points of a polyline // Truncate the value that's shown in the combo box // to prevent combo from using all screen width. String prop_opt = pi.toString(); if (prop_opt.length() > MAX_PROP_LENGTH) prop_opt = prop_opt.substring(0, MAX_PROP_LENGTH) + "..."; prop_id_opts.add(prop_opt); } propComboBox = new ComboBox<String>(prop_id_opts); propComboBox.setDisable(true); propComboBox.getSelectionModel().selectedIndexProperty().addListener( (p, o, index) -> { // Select property info based on index within combo. int idx = index.intValue(); if ( idx >= 0 ) { PropInfo prop = propinfo_ls.get(idx); if ( selected_rule_item.tryUpdatePropID(undo, prop.getPropID()) ) { expression_items.setAll(selected_rule_item.expressions); } } }); propComboBox.setMinHeight(27); propComboBox.setMaxWidth(Double.MAX_VALUE); HBox.setHgrow(propComboBox, Priority.ALWAYS); // TODO: change this to actually manipulate expression objects in the rule valExpBox = new CheckBox("Value as Expression"); valExpBox.setDisable(true); valExpBox.selectedProperty().addListener( (ov, old_val, new_val) -> { if (!selected_rule_item.tryTogglePropAsExpr(undo, new_val)) logger.log(Level.FINE, "Did not update rule property as expression flag to " + new_val); else expression_items.setAll(selected_rule_item.expressions); }); final Region spring = new Region(); HBox.setHgrow(spring, Priority.ALWAYS); final HBox props = new HBox(10, propLabel, propComboBox, spring, valExpBox); props.setAlignment(Pos.CENTER); pvs.setPadding(new Insets(0, 10, 0, 0)); exprs.setPadding(new Insets(0, 0, 0, 10)); HBox.setHgrow(pvs, Priority.ALWAYS); HBox.setHgrow(exprs, Priority.ALWAYS); final Preferences pref = Preferences.userNodeForPackage(RulesDialog.class); final double prefRSPDividerPosition = pref.getDouble("rule.content.divider.position", 0.5); ruleSplitPane = new SplitPane(pvs, exprs); ruleSplitPane.setOrientation(Orientation.HORIZONTAL); ruleSplitPane.setDividerPositions(prefRSPDividerPosition); ruleSplitPane.setStyle("-fx-background-insets: 
0, 0;"); VBox.setVgrow(ruleSplitPane, Priority.ALWAYS); final VBox subitems = new VBox(10, props, ruleSplitPane); final VBox rulebox = new VBox(10, rules); rulebox.setPadding(new Insets(0, 10, 0, 0)); subitems.setPadding(new Insets(0, 0, 0, 10)); VBox.setVgrow(rules, Priority.ALWAYS); HBox.setHgrow(subitems, Priority.ALWAYS); final double prefWidth = pref.getDouble("content.width", -1); final double prefHeight = pref.getDouble("content.height", -1); final double prefDividerPosition = pref.getDouble("content.divider.position", 0.3); final SplitPane splitPane = new SplitPane(rulebox, subitems); splitPane.setOrientation(Orientation.HORIZONTAL); splitPane.setDividerPositions(prefDividerPosition); if ( prefWidth > 0 && prefHeight > 0 ) { splitPane.setPrefSize(prefWidth, prefHeight); } // Select the first rule if ( !rules_table.getItems().isEmpty() ) { Platform.runLater(() -> { rules_table.getSelectionModel().select(0); rules_table.requestFocus(); }); } else { Platform.runLater(() -> btn_add_rule.requestFocus()); } return splitPane; } /** * @return Node for UI elements that edit the rules */ private Node createRulesTable ( ) { // Create table with editable rule 'name' column final TableColumn<RuleItem, String> name_col = new TableColumn<>(Messages.RulesDialog_ColName); name_col.setCellValueFactory(new PropertyValueFactory<RuleItem, String>("name")); name_col.setCellFactory(list -> new TextFieldTableCell<RuleItem, String>(new DefaultStringConverter()) { private final ChangeListener<? 
super Boolean> focusedListener = ( ob, o, n ) -> { if ( !n ) { cancelEdit(); } }; @Override public void cancelEdit ( ) { ( (TextField) getGraphic() ).focusedProperty().removeListener(focusedListener); super.cancelEdit(); } @Override public void commitEdit ( final String newValue ) { ( (TextField) getGraphic() ).focusedProperty().removeListener(focusedListener); super.commitEdit(newValue); Platform.runLater( ( ) -> btn_add_pv.requestFocus()); } @Override public void startEdit ( ) { super.startEdit(); ( (TextField) getGraphic() ).focusedProperty().addListener(focusedListener); } }); name_col.setOnEditCommit(event -> { final int row = event.getTablePosition().getRow(); rule_items.get(row).name.set(event.getNewValue()); fixupRules(row); }); rules_table = new TableView<>(rule_items); rules_table.getColumns().add(name_col); rules_table.setEditable(true); rules_table.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY); rules_table.setTooltip(new Tooltip(Messages.RulesDialog_RulesTT)); rules_table.setPlaceholder(new Label(Messages.RulesDialog_NoRules)); // Buttons btn_add_rule = new Button(Messages.Add, JFXUtil.getIcon("add.png")); btn_add_rule.setMaxWidth(Double.MAX_VALUE); btn_add_rule.setAlignment(Pos.CENTER_LEFT); btn_add_rule.setOnAction(event -> { RuleItem newItem = new RuleItem( attached_widget, ( selected_rule_item == null ) ? ( ( propinfo_ls.size() == 0 ) ? 
"" : propinfo_ls.get(0).getPropID() ) : selected_rule_item.prop_id.get() ); rule_items.add(newItem); rules_table.getSelectionModel().select(newItem); final int newRow = rules_table.getSelectionModel().getSelectedIndex(); ModelThreadPool.getTimer().schedule( ( ) -> { Platform.runLater( ( ) -> rules_table.edit(newRow, name_col)); }, 123, TimeUnit.MILLISECONDS); }); btn_remove_rule = new Button(Messages.Remove, JFXUtil.getIcon("delete.png")); btn_remove_rule.setMaxWidth(Double.MAX_VALUE); btn_remove_rule.setAlignment(Pos.CENTER_LEFT); btn_remove_rule.setDisable(true); btn_remove_rule.setOnAction(event -> { final int sel = rules_table.getSelectionModel().getSelectedIndex(); if ( sel >= 0 ) { rule_items.remove(sel); fixupRules(sel); } }); btn_move_rule_up = new Button(Messages.MoveUp, JFXUtil.getIcon("up.png")); btn_move_rule_up.setMaxWidth(Double.MAX_VALUE); btn_move_rule_up.setAlignment(Pos.CENTER_LEFT); btn_move_rule_up.setDisable(true); btn_move_rule_up.setOnAction(event -> TableHelper.move_item_up(rules_table, rule_items)); btn_move_rule_down = new Button(Messages.MoveDown, JFXUtil.getIcon("down.png")); btn_move_rule_down.setMaxWidth(Double.MAX_VALUE); btn_move_rule_down.setAlignment(Pos.CENTER_LEFT); btn_move_rule_down.setDisable(true); btn_move_rule_down.setOnAction(event -> TableHelper.move_item_down(rules_table, rule_items)); btn_dup_rule = new Button(Messages.Duplicate, JFXUtil.getIcon("file-duplicate.png")); btn_dup_rule.setMaxWidth(Double.MAX_VALUE); btn_dup_rule.setAlignment(Pos.CENTER_LEFT); btn_dup_rule.setDisable(true); btn_dup_rule.setOnAction(event -> { if ( selected_rule_item != null ) { RuleItem newItem = RuleItem.forInfo(attached_widget, selected_rule_item.getRuleInfo(), undo); if ( !newItem.nameProperty().get().endsWith(" (duplicate)") ) { newItem.nameProperty().set(newItem.nameProperty().get() + " (duplicate)"); } rule_items.add(newItem); rules_table.getSelectionModel().select(newItem); final int newRow = 
rules_table.getSelectionModel().getSelectedIndex(); ModelThreadPool.getTimer().schedule( ( ) -> { Platform.runLater( ( ) -> rules_table.edit(newRow, name_col)); }, 123, TimeUnit.MILLISECONDS); } }); btn_show_script = new Button(Messages.RulesDialog_ShowScript, JFXUtil.getIcon("file.png")); btn_show_script.setMaxWidth(Double.MAX_VALUE); btn_show_script.setMinWidth(120); btn_dup_rule.setAlignment(Pos.CENTER_LEFT); btn_show_script.setDisable(true); btn_show_script.setOnAction(event -> { final int sel = rules_table.getSelectionModel().getSelectedIndex(); if ( sel >= 0 ) { final String content = rule_items.get(sel).getRuleInfo().getTextPy(attached_widget); final SyntaxHighlightedMultiLineInputDialog dialog = new SyntaxHighlightedMultiLineInputDialog( btn_show_script, content, Language.Python, false ); DialogHelper.positionDialog(dialog, btn_show_script, -200, -300); dialog.setTextHeight(600); dialog.show(); } }); final VBox buttons = new VBox(10, btn_add_rule, btn_remove_rule, btn_move_rule_up, btn_move_rule_down, new Pane(), btn_dup_rule, btn_show_script); final HBox content = new HBox(10, rules_table, buttons); HBox.setHgrow(rules_table, Priority.ALWAYS); HBox.setHgrow(buttons, Priority.NEVER); return content; } /** Fix rules data: Delete empty rows in middle * @param changed_row Row to check, and remove if it's empty */ private void fixupRules(final int changed_row) { // Check if edited row is now empty and should be deleted if (changed_row < rule_items.size()) { final RuleItem item = rule_items.get(changed_row); if (item.nameProperty().get().trim().isEmpty()) rule_items.remove(changed_row); } } /** @return Node for UI elements that edit the expressions */ private HBox createExpressionsTable ( ) { // Create table with editable rule 'bool expression' column final TableColumn<ExprItem<?>, String> bool_exp_col = new TableColumn<>(Messages.RulesDialog_ColBoolExp); bool_exp_col.setSortable(false); bool_exp_col.setCellValueFactory(new PropertyValueFactory<ExprItem<?>, 
String>("boolExp")); bool_exp_col.setCellFactory(tableColumn -> new TextFieldTableCell<ExprItem<?>, String>(new DefaultStringConverter()) { private final ChangeListener<? super Boolean> focusedListener = ( ob, o, n ) -> { if ( !n ) { cancelEdit(); } }; /* Instance initializer. */ { setAlignment(Pos.CENTER_LEFT); } @Override public void cancelEdit ( ) { ( (TextField) getGraphic() ).focusedProperty().removeListener(focusedListener); super.cancelEdit(); } @Override public void commitEdit ( final String newValue ) { ( (TextField) getGraphic() ).focusedProperty().removeListener(focusedListener); super.commitEdit(newValue); } @Override public void startEdit ( ) { super.startEdit(); ( (TextField) getGraphic() ).focusedProperty().addListener(focusedListener); } }); // Create table with editable rule 'value expression' column final TableColumn<ExprItem<?>, Node> val_exp_col = new TableColumn<>(Messages.RulesDialog_ColValExp); // This statement requires "val_exp_col" be defined. bool_exp_col.setOnEditCommit(event -> { final int row = event.getTablePosition().getRow(); expression_items.get(row).boolExpProperty().set(event.getNewValue()); ModelThreadPool.getTimer().schedule(() ->{ Platform.runLater(() -> { val_exp_col.getCellData(row).requestFocus(); }); }, 123, TimeUnit.MILLISECONDS); }); val_exp_col.setSortable(false); val_exp_col.setCellValueFactory(new PropertyValueFactory<ExprItem<?>, Node>("field")); val_exp_col.setCellFactory(tableColumn -> new TableCell<ExprItem<?>, Node>() { @Override protected void updateItem ( Node item, boolean empty ) { // calling super here is very important - don't skip this! 
super.updateItem(item, empty); setGraphic(item); } }); val_exp_col.setOnEditCommit(event -> { final int row = event.getTablePosition().getRow(); expression_items.get(row).fieldProperty().set(event.getNewValue()); event.consume(); ModelThreadPool.getTimer().schedule(() ->{ Platform.runLater(() -> { btn_add_exp.requestFocus(); }); }, 1230, TimeUnit.MILLISECONDS); }); expressions_table = new TableView<>(expression_items); expressions_table.getColumns().add(bool_exp_col); expressions_table.getColumns().add(val_exp_col); expressions_table.setEditable(true); expressions_table.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY); expressions_table.setTooltip(new Tooltip(Messages.RulesDialog_ExpressionsTT)); expressions_table.setPlaceholder(new Label(Messages.RulesDialog_NoExpressions)); // Buttons btn_add_exp = new Button(Messages.Add, JFXUtil.getIcon("add.png")); btn_add_exp.setMaxWidth(Double.MAX_VALUE); btn_add_exp.setAlignment(Pos.CENTER_LEFT); btn_add_exp.setOnAction(event -> { selected_rule_item.addNewExpr(undo); expression_items.setAll(selected_rule_item.expressions); expressions_table.getSelectionModel().select(expression_items.size() - 1); final int newRow = expression_items.size() - 1; ModelThreadPool.getTimer().schedule(() ->{ Platform.runLater(() -> expressions_table.edit(newRow, bool_exp_col)); }, 123, TimeUnit.MILLISECONDS); }); btn_rm_exp = new Button(Messages.Remove, JFXUtil.getIcon("delete.png")); btn_rm_exp.setMaxWidth(Double.MAX_VALUE); btn_rm_exp.setMinWidth(96); btn_rm_exp.setAlignment(Pos.CENTER_LEFT); btn_rm_exp.setDisable(true); btn_rm_exp.setOnAction(event -> { final int sel = expressions_table.getSelectionModel().getSelectedIndex(); if ( sel >= 0 ) { expression_items.remove(sel); } }); btn_move_exp_up = new Button(Messages.MoveUp, JFXUtil.getIcon("up.png")); btn_move_exp_up.setMaxWidth(Double.MAX_VALUE); btn_move_exp_up.setAlignment(Pos.CENTER_LEFT); btn_move_exp_up.setDisable(true); btn_move_exp_up.setOnAction(event -> 
TableHelper.move_item_up(expressions_table, expression_items)); btn_move_exp_down = new Button(Messages.MoveDown, JFXUtil.getIcon("down.png")); btn_move_exp_down.setMaxWidth(Double.MAX_VALUE); btn_move_exp_down.setAlignment(Pos.CENTER_LEFT); btn_move_exp_down.setDisable(true); btn_move_exp_down.setOnAction(event -> TableHelper.move_item_down(expressions_table, expression_items)); final VBox buttons = new VBox(10, btn_add_exp, btn_rm_exp, btn_move_exp_up, btn_move_exp_down); final HBox content = new HBox(10, expressions_table, buttons); HBox.setHgrow(expressions_table, Priority.ALWAYS); HBox.setHgrow(buttons, Priority.NEVER); content.setDisable(true); return content; } /** @return Node for UI elements that edit the PVs of a rule */ private HBox createPVsTable ( ) { final TableColumn<PVTableItem, Integer> indexColumn = new TableColumn<>("#"); indexColumn.setEditable(false); indexColumn.setSortable(false); indexColumn.setCellFactory(new LineNumberTableCellFactory<>(true)); indexColumn.setMaxWidth(26); indexColumn.setMinWidth(26); // Create table with editable 'name' column final TableColumn<PVTableItem, String> name_col = new TableColumn<>(Messages.ScriptsDialog_ColPV); name_col.setSortable(false); name_col.setCellValueFactory(new PropertyValueFactory<PVTableItem, String>("name")); name_col.setCellFactory(col -> new AutoCompletedTableCell(menu, btn_add_pv)); name_col.setOnEditCommit(event -> { final int row = event.getTablePosition().getRow(); pv_items.get(row).nameProperty().set(event.getNewValue()); fixupPVs(row); }); // Table column for 'trigger' uses CheckBoxTableCell that directly // modifies the Observable Property final TableColumn<PVTableItem, Boolean> trigger_col = new TableColumn<>(Messages.ScriptsDialog_ColTrigger); trigger_col.setSortable(false); trigger_col.setCellValueFactory(new PropertyValueFactory<PVTableItem, Boolean>("trigger")); trigger_col.setCellFactory(CheckBoxTableCell.<PVTableItem> forTableColumn(trigger_col)); trigger_col.setResizable(false); 
trigger_col.setMaxWidth(70); trigger_col.setMinWidth(70); pvs_table = new TableView<>(pv_items); pvs_table.getColumns().add(indexColumn); pvs_table.getColumns().add(name_col); pvs_table.getColumns().add(trigger_col); pvs_table.setEditable(true); pvs_table.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY); pvs_table.setTooltip(new Tooltip(Messages.RulesDialog_PVsTT)); pvs_table.setPlaceholder(new Label(Messages.RulesDialog_NoPVs)); // Buttons btn_add_pv = new Button(Messages.Add, JFXUtil.getIcon("add.png")); btn_add_pv.setMaxWidth(Double.MAX_VALUE); btn_add_pv.setAlignment(Pos.CENTER_LEFT); btn_add_pv.setOnAction(event -> { final PVTableItem newItem = new PVTableItem("new-PV", true); pv_items.add(newItem); pvs_table.getSelectionModel().select(newItem); final int newRow = pvs_table.getSelectionModel().getSelectedIndex(); ModelThreadPool.getTimer().schedule(() ->{ Platform.runLater(() -> pvs_table.edit(newRow, name_col)); }, 123, TimeUnit.MILLISECONDS); }); btn_rm_pv = new Button(Messages.Remove, JFXUtil.getIcon("delete.png")); btn_rm_pv.setMaxWidth(Double.MAX_VALUE); btn_rm_pv.setMinWidth(96); btn_rm_pv.setAlignment(Pos.CENTER_LEFT); btn_rm_pv.setDisable(true); btn_rm_pv.setOnAction(event -> { final int sel = pvs_table.getSelectionModel().getSelectedIndex(); if ( sel >= 0 ) { pv_items.remove(sel); fixupPVs(sel); } }); btn_move_pv_up = new Button(Messages.MoveUp, JFXUtil.getIcon("up.png")); btn_move_pv_up.setMaxWidth(Double.MAX_VALUE); btn_move_pv_up.setAlignment(Pos.CENTER_LEFT); btn_move_pv_up.setDisable(true); btn_move_pv_up.setOnAction(event -> TableHelper.move_item_up(pvs_table, pv_items)); btn_move_pv_down = new Button(Messages.MoveDown, JFXUtil.getIcon("down.png")); btn_move_pv_down.setMaxWidth(Double.MAX_VALUE); btn_move_pv_down.setAlignment(Pos.CENTER_LEFT); btn_move_pv_down.setDisable(true); btn_move_pv_down.setOnAction(event -> TableHelper.move_item_down(pvs_table, pv_items)); final VBox buttons = new VBox(10, btn_add_pv, btn_rm_pv, btn_move_pv_up, 
btn_move_pv_down); final HBox content = new HBox(10, pvs_table, buttons); HBox.setHgrow(pvs_table, Priority.ALWAYS); HBox.setHgrow(buttons, Priority.NEVER); content.setDisable(true); return content; } /** Fix PVs data: Delete empty rows in middle * @param changed_row Row to check, and remove if it's empty */ private void fixupPVs(final int changed_row) { // Check if edited row is now empty and should be deleted if (changed_row < pv_items.size()) { final PVTableItem item = pv_items.get(changed_row); if (item.nameProperty().get().trim().isEmpty()) pv_items.remove(changed_row); } } }
RulesDialog: Align with phoebus version
org.csstudio.display.builder.editor/src/org/csstudio/display/builder/editor/properties/RulesDialog.java
RulesDialog: Align with phoebus version
<ide><path>rg.csstudio.display.builder.editor/src/org/csstudio/display/builder/editor/properties/RulesDialog.java <ide> <ide> /** Dialog for editing {@link RuleInfo}s <ide> * @author Megan Grodowitz <add> * @author Claudio Rosati <add> * @author Kay Kasemir <ide> */ <ide> @SuppressWarnings("nls") <ide> public class RulesDialog extends Dialog<List<RuleInfo>> <ide> { <del> <ide> /** Expression info as property-based item for table */ <ide> public abstract static class ExprItem<T> <ide> { <ide> abstract public ExpressionInfo<T> toExprInfo(); <ide> abstract public T getPropVal(); <ide> }; <del> <ide> <ide> public static class ExprItemString extends ExprItem<String> <ide> { <ide> this.undo = undo; <ide> this.attached_widget = attached_widget; <ide> this.menu = menu; <add> <ide> this.propinfo_ls = RuleInfo.getTargettableProperties(attached_widget); <ide> <ide> setTitle(Messages.RulesDialog_Title); <ide> final HBox exprs = createExpressionsTable(); <ide> <ide> // Display PVs of currently selected rule <del> rules_table.getSelectionModel().selectedItemProperty().addListener( ( prop, old, selected ) -> { <del> <add> rules_table.getSelectionModel().selectedItemProperty().addListener( (prop, old, selected) -> <add> { <ide> selected_rule_item = selected; <del> <del> if ( selected == null ) { <del> <add> if (selected == null) <add> { <ide> pvs.setDisable(true); <ide> exprs.setDisable(true); <del> <ide> btn_remove_rule.setDisable(true); <ide> btn_dup_rule.setDisable(true); <ide> btn_move_rule_up.setDisable(true); <ide> propComboBox.setDisable(true); <ide> propComboBox.getSelectionModel().select(null); <ide> valExpBox.setDisable(true); <del> <ide> pv_items.clear(); <ide> expression_items.clear(); <del> <del> } else { <del> <add> } <add> else <add> { <ide> pvs.setDisable(false); <ide> exprs.setDisable(false); <ide> <del> TableViewSelectionModel<RuleItem> model = rules_table.getSelectionModel(); <del> <add> final TableViewSelectionModel<RuleItem> model = 
rules_table.getSelectionModel(); <ide> btn_remove_rule.setDisable(false); <ide> btn_dup_rule.setDisable(false); <ide> btn_move_rule_up.setDisable(model.getSelectedIndex() == 0); <ide> propComboBox.getSelectionModel().select(getPropLongString(selected)); <ide> valExpBox.setDisable(false); <ide> valExpBox.selectedProperty().set(selected.prop_as_expr.get()); <del> <ide> pv_items.setAll(selected.pvs); <ide> expression_items.setAll(selected.expressions); <del> <ide> fixupPVs(0); <del> <del> } <del> <add> } <ide> }); <ide> <ide> // Update PVs of selected rule from PVs table <del> final ListChangeListener<PVTableItem> pll = change -> { <del> <add> final ListChangeListener<PVTableItem> pll = change -> <add> { <ide> final RuleItem selected = rules_table.getSelectionModel().getSelectedItem(); <del> <del> if ( selected != null ) { <add> if (selected != null) <ide> selected.pvs = new ArrayList<>(change.getList()); <del> } <del> <ide> }; <ide> pv_items.addListener(pll); <ide> <ide> // Update buttons for currently selected PV <del> pvs_table.getSelectionModel().selectedItemProperty().addListener( ( prop, old, selected ) -> { <del> if ( selected == null ) { <add> pvs_table.getSelectionModel().selectedItemProperty().addListener( (prop, old, selected) -> <add> { <add> if (selected == null) <add> { <ide> btn_rm_pv.setDisable(true); <ide> btn_move_pv_up.setDisable(true); <ide> btn_move_pv_down.setDisable(true); <del> } else { <del> <del> TableViewSelectionModel<PVTableItem> model = pvs_table.getSelectionModel(); <del> <add> } <add> else <add> { <add> final TableViewSelectionModel<PVTableItem> model = pvs_table.getSelectionModel(); <ide> btn_rm_pv.setDisable(false); <ide> btn_move_pv_up.setDisable(model.getSelectedIndex() == 0); <ide> btn_move_pv_down.setDisable(model.getSelectedIndex() == pv_items.size() - 1); <del> <ide> } <ide> }); <ide> <ide> // Update Expressions of selected rule from Expressions table <del> final ListChangeListener<ExprItem<?>> ell = change -> { <del> <add> 
final ListChangeListener<ExprItem<?>> ell = change -> <add> { <ide> final RuleItem selected = rules_table.getSelectionModel().getSelectedItem(); <del> <del> if (selected != null) { <add> if (selected != null) <ide> selected.expressions = new ArrayList<>(change.getList()); <del> } <del> <ide> }; <ide> expression_items.addListener(ell); <ide> <ide> // Update buttons for currently selected expression <del> expressions_table.getSelectionModel().selectedItemProperty().addListener( ( prop, old, selected ) -> { <del> if ( selected == null ) { <add> expressions_table.getSelectionModel().selectedItemProperty().addListener( (prop, old, selected) -> <add> { <add> if (selected == null) <add> { <ide> btn_rm_exp.setDisable(true); <ide> btn_move_exp_up.setDisable(true); <ide> btn_move_exp_down.setDisable(true); <del> } else { <del> <del> TableViewSelectionModel<ExprItem<?>> model = expressions_table.getSelectionModel(); <del> <add> } <add> else <add> { <add> final TableViewSelectionModel<ExprItem<?>> model = expressions_table.getSelectionModel(); <ide> btn_rm_exp.setDisable(false); <ide> btn_move_exp_up.setDisable(model.getSelectedIndex() == 0); <ide> btn_move_exp_down.setDisable(model.getSelectedIndex() == expression_items.size() - 1); <del> <ide> } <ide> }); <ide> <ide> } <ide> propComboBox = new ComboBox<String>(prop_id_opts); <ide> propComboBox.setDisable(true); <del> propComboBox.getSelectionModel().selectedIndexProperty().addListener( (p, o, index) -> { <del> // Select property info based on index within combo. <del> int idx = index.intValue(); <del> <del> if ( idx >= 0 ) { <del> <del> PropInfo prop = propinfo_ls.get(idx); <del> <del> if ( selected_rule_item.tryUpdatePropID(undo, prop.getPropID()) ) { <add> propComboBox.getSelectionModel().selectedIndexProperty().addListener( (p, o, index) -> <add> { // Select property info based on index within combo. 
<add> final int idx = index.intValue(); <add> if (idx >= 0) <add> { <add> final PropInfo prop = propinfo_ls.get(idx); <add> if (selected_rule_item.tryUpdatePropID(undo, prop.getPropID())) <ide> expression_items.setAll(selected_rule_item.expressions); <del> } <del> <del> } <del> <add> } <ide> }); <ide> propComboBox.setMinHeight(27); <ide> propComboBox.setMaxWidth(Double.MAX_VALUE); <ide> splitPane.setOrientation(Orientation.HORIZONTAL); <ide> splitPane.setDividerPositions(prefDividerPosition); <ide> <del> if ( prefWidth > 0 && prefHeight > 0 ) { <add> if (prefWidth > 0 && prefHeight > 0) <ide> splitPane.setPrefSize(prefWidth, prefHeight); <del> } <ide> <ide> // Select the first rule <del> if ( !rules_table.getItems().isEmpty() ) { <del> Platform.runLater(() -> { <add> if (!rules_table.getItems().isEmpty()) <add> { <add> Platform.runLater(() -> <add> { <ide> rules_table.getSelectionModel().select(0); <ide> rules_table.requestFocus(); <ide> }); <del> } else { <add> } <add> else <ide> Platform.runLater(() -> btn_add_rule.requestFocus()); <del> } <ide> <ide> return splitPane; <del> <ide> } <ide> <del> /** <del> * @return Node for UI elements that edit the rules <del> */ <del> private Node createRulesTable ( ) { <del> <add> /** @return Node for UI elements that edit the rules */ <add> private Node createRulesTable () <add> { <ide> // Create table with editable rule 'name' column <ide> final TableColumn<RuleItem, String> name_col = new TableColumn<>(Messages.RulesDialog_ColName); <ide> <ide> name_col.setCellValueFactory(new PropertyValueFactory<RuleItem, String>("name")); <del> name_col.setCellFactory(list -> new TextFieldTableCell<RuleItem, String>(new DefaultStringConverter()) { <del> <del> private final ChangeListener<? super Boolean> focusedListener = ( ob, o, n ) -> { <del> if ( !n ) { <add> name_col.setCellFactory(list -> new TextFieldTableCell<RuleItem, String>(new DefaultStringConverter()) <add> { <add> private final ChangeListener<? 
super Boolean> focusedListener = (ob, o, n) -> <add> { <add> if (!n) <ide> cancelEdit(); <del> } <ide> }; <ide> <ide> @Override <del> public void cancelEdit ( ) { <add> public void cancelEdit() <add> { <ide> ( (TextField) getGraphic() ).focusedProperty().removeListener(focusedListener); <ide> super.cancelEdit(); <ide> } <ide> <ide> @Override <del> public void commitEdit ( final String newValue ) { <add> public void commitEdit(final String newValue) <add> { <ide> ( (TextField) getGraphic() ).focusedProperty().removeListener(focusedListener); <ide> super.commitEdit(newValue); <ide> Platform.runLater( ( ) -> btn_add_pv.requestFocus()); <ide> } <ide> <ide> @Override <del> public void startEdit ( ) { <add> public void startEdit() <add> { <ide> super.startEdit(); <ide> ( (TextField) getGraphic() ).focusedProperty().addListener(focusedListener); <ide> } <del> <del> }); <del> name_col.setOnEditCommit(event -> { <del> <add> }); <add> name_col.setOnEditCommit(event -> <add> { <ide> final int row = event.getTablePosition().getRow(); <del> <ide> rule_items.get(row).name.set(event.getNewValue()); <ide> fixupRules(row); <del> <ide> }); <ide> <ide> rules_table = new TableView<>(rule_items); <ide> btn_add_rule = new Button(Messages.Add, JFXUtil.getIcon("add.png")); <ide> btn_add_rule.setMaxWidth(Double.MAX_VALUE); <ide> btn_add_rule.setAlignment(Pos.CENTER_LEFT); <del> btn_add_rule.setOnAction(event -> { <del> <del> RuleItem newItem = new RuleItem( <add> btn_add_rule.setOnAction(event -> <add> { <add> final RuleItem newItem = new RuleItem( <ide> attached_widget, <del> ( selected_rule_item == null ) <del> ? ( ( propinfo_ls.size() == 0 ) ? "" : propinfo_ls.get(0).getPropID() ) <del> : selected_rule_item.prop_id.get() <add> selected_rule_item == null <add> ? ( ( propinfo_ls.size() == 0 ) ? 
"" : propinfo_ls.get(0).getPropID() ) <add> : selected_rule_item.prop_id.get() <ide> ); <ide> <ide> rule_items.add(newItem); <ide> <ide> final int newRow = rules_table.getSelectionModel().getSelectedIndex(); <ide> <del> ModelThreadPool.getTimer().schedule( ( ) -> { <del> Platform.runLater( ( ) -> rules_table.edit(newRow, name_col)); <add> ModelThreadPool.getTimer().schedule(() -> <add> { <add> Platform.runLater(() -> rules_table.edit(newRow, name_col)); <ide> }, 123, TimeUnit.MILLISECONDS); <del> <ide> }); <ide> <ide> btn_remove_rule = new Button(Messages.Remove, JFXUtil.getIcon("delete.png")); <ide> btn_remove_rule.setMaxWidth(Double.MAX_VALUE); <ide> btn_remove_rule.setAlignment(Pos.CENTER_LEFT); <ide> btn_remove_rule.setDisable(true); <del> btn_remove_rule.setOnAction(event -> { <add> btn_remove_rule.setOnAction(event -> <add> { <ide> final int sel = rules_table.getSelectionModel().getSelectedIndex(); <del> if ( sel >= 0 ) { <add> if (sel >= 0) <add> { <ide> rule_items.remove(sel); <ide> fixupRules(sel); <ide> } <ide> btn_dup_rule.setMaxWidth(Double.MAX_VALUE); <ide> btn_dup_rule.setAlignment(Pos.CENTER_LEFT); <ide> btn_dup_rule.setDisable(true); <del> btn_dup_rule.setOnAction(event -> { <del> if ( selected_rule_item != null ) { <del> <del> RuleItem newItem = RuleItem.forInfo(attached_widget, selected_rule_item.getRuleInfo(), undo); <del> <del> if ( !newItem.nameProperty().get().endsWith(" (duplicate)") ) { <add> btn_dup_rule.setOnAction(event -> <add> { <add> if (selected_rule_item != null) <add> { <add> final RuleItem newItem = RuleItem.forInfo(attached_widget, selected_rule_item.getRuleInfo(), undo); <add> <add> if (!newItem.nameProperty().get().endsWith(" (duplicate)")) <ide> newItem.nameProperty().set(newItem.nameProperty().get() + " (duplicate)"); <del> } <ide> <ide> rule_items.add(newItem); <ide> rules_table.getSelectionModel().select(newItem); <ide> <ide> final int newRow = rules_table.getSelectionModel().getSelectedIndex(); <ide> <del> 
ModelThreadPool.getTimer().schedule( ( ) -> { <add> ModelThreadPool.getTimer().schedule(() -> <add> { <ide> Platform.runLater( ( ) -> rules_table.edit(newRow, name_col)); <ide> }, 123, TimeUnit.MILLISECONDS); <del> <ide> } <ide> }); <ide> <ide> btn_show_script.setMinWidth(120); <ide> btn_dup_rule.setAlignment(Pos.CENTER_LEFT); <ide> btn_show_script.setDisable(true); <del> btn_show_script.setOnAction(event -> { <del> <add> btn_show_script.setOnAction(event -> <add> { <ide> final int sel = rules_table.getSelectionModel().getSelectedIndex(); <del> <del> if ( sel >= 0 ) { <del> <add> if (sel >= 0) <add> { <ide> final String content = rule_items.get(sel).getRuleInfo().getTextPy(attached_widget); <ide> final SyntaxHighlightedMultiLineInputDialog dialog = new SyntaxHighlightedMultiLineInputDialog( <del> btn_show_script, <del> content, <del> Language.Python, <del> false <del> ); <del> <add> btn_show_script, <add> content, <add> Language.Python, <add> false <add> ); <ide> DialogHelper.positionDialog(dialog, btn_show_script, -200, -300); <ide> dialog.setTextHeight(600); <ide> dialog.show(); <del> <del> } <del> <add> } <ide> }); <ide> <ide> final VBox buttons = new VBox(10, btn_add_rule, btn_remove_rule, btn_move_rule_up, btn_move_rule_down, new Pane(), btn_dup_rule, btn_show_script); <ide> HBox.setHgrow(buttons, Priority.NEVER); <ide> <ide> return content; <del> <ide> } <ide> <ide> /** Fix rules data: Delete empty rows in middle <ide> } <ide> <ide> /** @return Node for UI elements that edit the expressions */ <del> private HBox createExpressionsTable ( ) { <del> <add> private HBox createExpressionsTable () <add> { <ide> // Create table with editable rule 'bool expression' column <ide> final TableColumn<ExprItem<?>, String> bool_exp_col = new TableColumn<>(Messages.RulesDialog_ColBoolExp); <ide> bool_exp_col.setSortable(false); <ide> bool_exp_col.setCellValueFactory(new PropertyValueFactory<ExprItem<?>, String>("boolExp")); <del> bool_exp_col.setCellFactory(tableColumn -> new 
TextFieldTableCell<ExprItem<?>, String>(new DefaultStringConverter()) { <del> <del> private final ChangeListener<? super Boolean> focusedListener = ( ob, o, n ) -> { <del> if ( !n ) { <add> bool_exp_col.setCellFactory(tableColumn -> new TextFieldTableCell<ExprItem<?>, String>(new DefaultStringConverter()) <add> { <add> private final ChangeListener<? super Boolean> focusedListener = (ob, o, n) -> <add> { <add> if (!n) <ide> cancelEdit(); <del> } <ide> }; <ide> <ide> /* Instance initializer. */ <ide> } <ide> <ide> @Override <del> public void cancelEdit ( ) { <add> public void cancelEdit() <add> { <ide> ( (TextField) getGraphic() ).focusedProperty().removeListener(focusedListener); <ide> super.cancelEdit(); <ide> } <ide> <ide> @Override <del> public void commitEdit ( final String newValue ) { <add> public void commitEdit (final String newValue) <add> { <ide> ( (TextField) getGraphic() ).focusedProperty().removeListener(focusedListener); <ide> super.commitEdit(newValue); <ide> } <ide> <ide> @Override <del> public void startEdit ( ) { <add> public void startEdit() <add> { <ide> super.startEdit(); <ide> ( (TextField) getGraphic() ).focusedProperty().addListener(focusedListener); <ide> } <del> <ide> }); <ide> <ide> // Create table with editable rule 'value expression' column <ide> final TableColumn<ExprItem<?>, Node> val_exp_col = new TableColumn<>(Messages.RulesDialog_ColValExp); <ide> <ide> // This statement requires "val_exp_col" be defined. 
<del> bool_exp_col.setOnEditCommit(event -> { <del> <add> bool_exp_col.setOnEditCommit(event -> <add> { <ide> final int row = event.getTablePosition().getRow(); <ide> <ide> expression_items.get(row).boolExpProperty().set(event.getNewValue()); <del> ModelThreadPool.getTimer().schedule(() ->{ <del> Platform.runLater(() -> { <del> val_exp_col.getCellData(row).requestFocus(); <del> }); <add> ModelThreadPool.getTimer().schedule(() -> <add> { <add> Platform.runLater(() -> val_exp_col.getCellData(row).requestFocus()); <ide> }, 123, TimeUnit.MILLISECONDS); <del> <ide> }); <ide> <ide> val_exp_col.setSortable(false); <ide> val_exp_col.setCellValueFactory(new PropertyValueFactory<ExprItem<?>, Node>("field")); <del> val_exp_col.setCellFactory(tableColumn -> new TableCell<ExprItem<?>, Node>() { <add> val_exp_col.setCellFactory(tableColumn -> new TableCell<ExprItem<?>, Node>() <add> { <ide> @Override <del> protected void updateItem ( Node item, boolean empty ) { <add> protected void updateItem (final Node item, final boolean empty) <add> { <ide> // calling super here is very important - don't skip this! 
<ide> super.updateItem(item, empty); <ide> setGraphic(item); <ide> } <ide> }); <del> val_exp_col.setOnEditCommit(event -> { <del> <add> val_exp_col.setOnEditCommit(event -> <add> { <ide> final int row = event.getTablePosition().getRow(); <ide> <ide> expression_items.get(row).fieldProperty().set(event.getNewValue()); <ide> event.consume(); <del> ModelThreadPool.getTimer().schedule(() ->{ <del> Platform.runLater(() -> { <del> btn_add_exp.requestFocus(); <del> }); <add> ModelThreadPool.getTimer().schedule(() -> <add> { <add> Platform.runLater(() -> btn_add_exp.requestFocus()); <ide> }, 1230, TimeUnit.MILLISECONDS); <del> <ide> }); <ide> <ide> expressions_table = new TableView<>(expression_items); <ide> btn_add_exp = new Button(Messages.Add, JFXUtil.getIcon("add.png")); <ide> btn_add_exp.setMaxWidth(Double.MAX_VALUE); <ide> btn_add_exp.setAlignment(Pos.CENTER_LEFT); <del> btn_add_exp.setOnAction(event -> { <del> <add> btn_add_exp.setOnAction(event -> <add> { <ide> selected_rule_item.addNewExpr(undo); <ide> expression_items.setAll(selected_rule_item.expressions); <ide> <ide> <ide> final int newRow = expression_items.size() - 1; <ide> <del> ModelThreadPool.getTimer().schedule(() ->{ <add> ModelThreadPool.getTimer().schedule(() -> <add> { <ide> Platform.runLater(() -> expressions_table.edit(newRow, bool_exp_col)); <ide> }, 123, TimeUnit.MILLISECONDS); <del> <ide> }); <ide> <ide> btn_rm_exp = new Button(Messages.Remove, JFXUtil.getIcon("delete.png")); <ide> btn_rm_exp.setMinWidth(96); <ide> btn_rm_exp.setAlignment(Pos.CENTER_LEFT); <ide> btn_rm_exp.setDisable(true); <del> btn_rm_exp.setOnAction(event -> { <del> <add> btn_rm_exp.setOnAction(event -> <add> { <ide> final int sel = expressions_table.getSelectionModel().getSelectedIndex(); <del> <del> if ( sel >= 0 ) { <add> if (sel >= 0) <ide> expression_items.remove(sel); <del> } <del> <ide> }); <ide> <ide> btn_move_exp_up = new Button(Messages.MoveUp, JFXUtil.getIcon("up.png")); <ide> } <ide> <ide> /** @return Node for UI 
elements that edit the PVs of a rule */ <del> private HBox createPVsTable ( ) { <del> <add> private HBox createPVsTable() <add> { <ide> final TableColumn<PVTableItem, Integer> indexColumn = new TableColumn<>("#"); <ide> indexColumn.setEditable(false); <ide> indexColumn.setSortable(false); <ide> name_col.setSortable(false); <ide> name_col.setCellValueFactory(new PropertyValueFactory<PVTableItem, String>("name")); <ide> name_col.setCellFactory(col -> new AutoCompletedTableCell(menu, btn_add_pv)); <del> name_col.setOnEditCommit(event -> { <del> <add> name_col.setOnEditCommit(event -> <add> { <ide> final int row = event.getTablePosition().getRow(); <del> <ide> pv_items.get(row).nameProperty().set(event.getNewValue()); <ide> fixupPVs(row); <del> <ide> }); <ide> <ide> // Table column for 'trigger' uses CheckBoxTableCell that directly <ide> btn_add_pv = new Button(Messages.Add, JFXUtil.getIcon("add.png")); <ide> btn_add_pv.setMaxWidth(Double.MAX_VALUE); <ide> btn_add_pv.setAlignment(Pos.CENTER_LEFT); <del> btn_add_pv.setOnAction(event -> { <del> <add> btn_add_pv.setOnAction(event -> <add> { <ide> final PVTableItem newItem = new PVTableItem("new-PV", true); <ide> <ide> pv_items.add(newItem); <ide> <ide> final int newRow = pvs_table.getSelectionModel().getSelectedIndex(); <ide> <del> ModelThreadPool.getTimer().schedule(() ->{ <add> ModelThreadPool.getTimer().schedule(() -> <add> { <ide> Platform.runLater(() -> pvs_table.edit(newRow, name_col)); <ide> }, 123, TimeUnit.MILLISECONDS); <ide> <ide> btn_rm_pv.setMinWidth(96); <ide> btn_rm_pv.setAlignment(Pos.CENTER_LEFT); <ide> btn_rm_pv.setDisable(true); <del> btn_rm_pv.setOnAction(event -> { <del> <add> btn_rm_pv.setOnAction(event -> <add> { <ide> final int sel = pvs_table.getSelectionModel().getSelectedIndex(); <del> <del> if ( sel >= 0 ) { <add> if (sel >= 0) <add> { <ide> pv_items.remove(sel); <ide> fixupPVs(sel); <ide> } <del> <ide> }); <ide> <ide> btn_move_pv_up = new Button(Messages.MoveUp, JFXUtil.getIcon("up.png")); 
<ide> content.setDisable(true); <ide> <ide> return content; <del> <ide> } <ide> <ide> /** Fix PVs data: Delete empty rows in middle
JavaScript
isc
7f431acaf378fc4fc986df151ace9cb9b517236d
0
ameily/mr-issue
var http = require('http'); var util = require('util'); var MrIssueDriver = require('./driver'); var MrIssueConfig = require('./config'); var config = new MrIssueConfig(); var driver = new MrIssueDriver(config, function(err) { if(err) { console.log("Could not initialize Mr. Issue driver: " + err); return; } http.createServer(function(req, res) { // req.url == "/endpoint" var data = ""; req.on('data', function(chunk) { data += chunk; }); req.on('end', function() { var body = null; try { body = JSON.parse(data); } catch(e) { } if(body) { } console.log(req.url); console.log(util.inspect(body, false, null, true)); console.log("\n"); res.writeHead(200, { 'Content-Type': 'application/json' }); res.end('{"ok": true}'); driver.handleWebHook(req.url, body); }); }).listen(8080, "0.0.0.0"); });
app.js
var http = require('http'); var util = require('util'); http.createServer(function(req, res) { // req.url == "/endpoint" var data = ""; req.on('data', function(chunk) { data += chunk; }); req.on('end', function() { var body = null; try { body = JSON.parse(data); } catch(e) { } if(body) { } console.log(req.url); console.log(util.inspect(body, false, null, true)); console.log("\n"); res.writeHead(200, { 'Content-Type': 'application/json' }); res.end('{"ok": true}'); }); }).listen(8080, "0.0.0.0");
added config parsing and request handling
app.js
added config parsing and request handling
<ide><path>pp.js <ide> <ide> var http = require('http'); <ide> var util = require('util'); <add>var MrIssueDriver = require('./driver'); <add>var MrIssueConfig = require('./config'); <ide> <del>http.createServer(function(req, res) { <del> // req.url == "/endpoint" <add>var config = new MrIssueConfig(); <add>var driver = new MrIssueDriver(config, function(err) { <add> if(err) { <add> console.log("Could not initialize Mr. Issue driver: " + err); <add> return; <add> } <ide> <del> var data = ""; <del> req.on('data', function(chunk) { <del> data += chunk; <del> }); <add> http.createServer(function(req, res) { <add> // req.url == "/endpoint" <ide> <del> req.on('end', function() { <del> var body = null; <del> try { <del> body = JSON.parse(data); <del> } catch(e) { <del> } <del> <del> if(body) { <add> var data = ""; <add> req.on('data', function(chunk) { <add> data += chunk; <add> }); <ide> <del> } <add> req.on('end', function() { <add> var body = null; <add> try { <add> body = JSON.parse(data); <add> } catch(e) { <add> } <ide> <del> console.log(req.url); <del> console.log(util.inspect(body, false, null, true)); <del> console.log("\n"); <add> if(body) { <ide> <del> res.writeHead(200, { 'Content-Type': 'application/json' }); <del> res.end('{"ok": true}'); <del> }); <del>}).listen(8080, "0.0.0.0"); <add> } <ide> <add> console.log(req.url); <add> console.log(util.inspect(body, false, null, true)); <add> console.log("\n"); <add> <add> res.writeHead(200, { 'Content-Type': 'application/json' }); <add> res.end('{"ok": true}'); <add> <add> driver.handleWebHook(req.url, body); <add> }); <add> }).listen(8080, "0.0.0.0"); <add>});
JavaScript
apache-2.0
4d54dfde446b63d3ededc0c90a939d48bfd8db77
0
control-center/serviced,control-center/serviced,control-center/serviced,control-center/serviced,control-center/serviced,control-center/serviced,control-center/serviced,control-center/serviced
/* globals controlplane: true */ /* ServiceDetailsController * Displays details of a specific service */ (function () { 'use strict'; // share angular services outside of angular context let $notification, serviceHealth, $q, resourcesFactory, utils; controlplane.controller("ServiceDetailsController", [ "$scope", "$q", "$routeParams", "$location", "resourcesFactory", "authService", "$modalService", "$translate", "$notification", "$timeout", "miscUtils", "hostsFactory", "$serviceHealth", "Service", "poolsFactory", "CCUIState", "$cookies", "areUIReady", "LogSearch", "$filter", function ($scope, _$q, $routeParams, $location, _resourcesFactory, authService, $modalService, $translate, _$notification, $timeout, _utils, hostsFactory, _serviceHealth, Service, poolsFactory, CCUIState, $cookies, areUIReady, LogSearch, $filter) { // api access via angular context $notification = _$notification; serviceHealth = _serviceHealth; $q = _$q; resourcesFactory = _resourcesFactory; utils = _utils; // Ensure logged in authService.checkLogin($scope); $scope.hostsFactory = hostsFactory; $scope.defaultHostAlias = $location.host(); if (utils.needsHostAlias($location.host())) { resourcesFactory.getHostAlias().success(function (data) { $scope.defaultHostAlias = data.hostalias; }); } //add Public Endpoint data $scope.publicEndpoints = { add: {} }; //add service endpoint data $scope.exportedServiceEndpoints = {}; $scope.clickPool = function (id) { resourcesFactory.routeToPool(id); }; $scope.clickHost = function (id) { resourcesFactory.routeToHost(id); }; $scope.modalAddPublicEndpoint = function () { areUIReady.lock(); $scope.protocols = []; $scope.protocols.push({ Label: "HTTPS", UseTLS: true, Protocol: "https" }); $scope.protocols.push({ Label: "HTTP", UseTLS: false, Protocol: "http" }); $scope.protocols.push({ Label: "Other, secure (TLS)", UseTLS: true, Protocol: "" }); $scope.protocols.push({ Label: "Other, non-secure", UseTLS: false, Protocol: "" }); // default public endpoint options 
$scope.publicEndpoints.add = { type: "port", endpoint: $scope.currentService.exportedServiceEndpoints[0], name: "", host: $scope.defaultHostAlias, port: "", protocol: $scope.protocols[0], }; // returns an error string if newPublicEndpoint's vhost is invalid var validateVHost = function (newPublicEndpoint) { var name = newPublicEndpoint.name; // if no port if (!name || !name.length) { return "Missing Name"; } // if name already exists for (var i in $scope.publicEndpoints.data) { if (name === $scope.publicEndpoints.data[i].Name) { return "Name already exists: " + newPublicEndpoint.name; } } // if invalid characters var re = /^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$/; if (!re.test(name)) { return $translate.instant("vhost_name_invalid") + " " + newPublicEndpoint.name; } }; // returns an error string if newPublicEndpoint's port is invalid var validatePort = function (newPublicEndpoint) { var host = newPublicEndpoint.host; var port = newPublicEndpoint.port; if (!host || !host.length) { return "Missing host name"; } // if invalid characters var re = /^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$/; if (!re.test(host)) { return $translate.instant("host_name_invalid") + ": " + host; } // if no port if (!port || !port.length) { return "Missing port"; } if (+port < 1 || +port > 65536) { return "Port must be between 1 and 65536"; } }; $modalService.create({ templateUrl: "add-public-endpoint.html", model: $scope, title: "add_public_endpoint", actions: [ { role: "cancel", action: function () { this.close(); } }, { role: "ok", label: "add_public_endpoint_confirm", action: function () { var newPublicEndpoint = $scope.publicEndpoints.add; if (this.validate(newPublicEndpoint)) { // disable ok button, and store the re-enable function var enableSubmit = this.disableSubmitButton(); $scope.addPublicEndpoint(newPublicEndpoint) .success(function (data, status) { 
// reload the table refreshEndpoints(); $notification.create("Added public endpoint").success(); this.close(); }.bind(this)) .error(function (data, status) { this.createNotification("Unable to add public endpoint", data.Detail).error(); enableSubmit(); }.bind(this)); } } } ], validate: function (newPublicEndpoint) { // if no service endpoint selected if (!newPublicEndpoint.endpoint) { this.createNotification("Unable to add Public Endpoint", "No service endpoint selected").error(); return false; } // perform type specific validation if (newPublicEndpoint.type === "vhost") { var err = validateVHost(newPublicEndpoint); if (err) { this.createNotification("Unable to add Public Endpoint", err).error(); } else { return true; } } else if (newPublicEndpoint.type === "port") { var err = validatePort(newPublicEndpoint); if (err) { this.createNotification("Unable to add Public Endpoint", err).error(); return false; } else { return true; } } }, onShow: () => { areUIReady.unlock(); } }); }; $scope.addPublicEndpoint = function (newPublicEndpoint) { var serviceId = newPublicEndpoint.endpoint.ServiceID; var serviceName = newPublicEndpoint.endpoint.ServiceName; var serviceEndpoint = newPublicEndpoint.endpoint.Application; if (newPublicEndpoint.type === "vhost") { var vhostName = newPublicEndpoint.name; return resourcesFactory.addVHost(serviceId, serviceName, serviceEndpoint, vhostName); } else if (newPublicEndpoint.type === "port") { var port = newPublicEndpoint.host + ":" + newPublicEndpoint.port; var usetls = newPublicEndpoint.protocol.UseTLS; var protocol = newPublicEndpoint.protocol.Protocol; return resourcesFactory.addPort(serviceId, serviceName, serviceEndpoint, port, usetls, protocol); } }; // modalAssignIP opens a modal view to assign an ip address to a service $scope.modalAssignIP = function (ip, poolID) { let modalScope = $scope.$new(true); modalScope.assignments = { 'ip': ip, 'value': null }; resourcesFactory.getPoolIPs(poolID) .success(function (data) { let options = [{ 
'Value': 'Automatic', 'IPAddr': '' }]; let i, IPAddr, value; //host ips if (data && data.HostIPs) { for (i = 0; i < data.HostIPs.length; ++i) { IPAddr = data.HostIPs[i].IPAddress; value = 'Host: ' + IPAddr + ' - ' + data.HostIPs[i].InterfaceName; options.push({ 'Value': value, 'IPAddr': IPAddr }); // TODO: look up associated endpoint name //modalScope.assignments.ip.EndpointName = "Boogity"; // set the default value to the currently assigned value if (modalScope.assignments.ip.IPAddress === IPAddr) { modalScope.assignments.value = options[options.length - 1]; } } } //virtual ips if (data && data.VirtualIPs) { for (i = 0; i < data.VirtualIPs.length; ++i) { IPAddr = data.VirtualIPs[i].IP; value = "Virtual IP: " + IPAddr; options.push({ 'Value': value, 'IPAddr': IPAddr }); // set the default value to the currently assigned value if (modalScope.assignments.ip.IPAddr === IPAddr) { modalScope.assignments.value = options[options.length - 1]; } } } //default to automatic if necessary if (!modalScope.assignments.value) { modalScope.assignments.value = options[0]; } modalScope.assignments.options = options; $modalService.create({ templateUrl: "assign-ip.html", model: modalScope, title: "assign_ip", actions: [ { role: "cancel" }, { role: "ok", label: "assign_ip", action: function () { if (this.validate()) { // disable ok button, and store the re-enable function var enableSubmit = this.disableSubmitButton(); var serviceID = modalScope.assignments.ip.ServiceID; var IP = modalScope.assignments.value.IPAddr; resourcesFactory.assignIP(serviceID, IP) .success(function (data, status) { $notification.create("Added IP", data.Detail).success(); update(); this.close(); }.bind(this)) .error(function (data, status) { this.createNotification("Unable to Assign IP", data.Detail).error(); enableSubmit(); }.bind(this)); } } } ] }); }) .error((data, status) => { $notification.create("Unable to retrieve IPs", data.Detail).error(); }); }; $scope.anyServicesExported = function (service) { 
if($scope.currentService && $scope.currentService.exportedServiceEndpoints){ return $scope.currentService.exportedServiceEndpoints.length > 0; } else { return false; } }; $scope.publicEndpointProtocol = function (publicEndpoint) { if ($scope.getEndpointType(publicEndpoint) === "vhost") { return "https"; } else { if (publicEndpoint.Protocol !== "") { return publicEndpoint.Protocol; } if (publicEndpoint.UseTLS) { return "other (TLS)"; } return "other"; } }; $scope.indent = function (depth) { return { 'padding-left': (24 * depth) + "px" }; }; // sets a service to start, stop or restart state $scope.setServiceState = function (service, state, skipChildren) { // service[state]() ends up translating to something like // service.start() or service.stop() service[state](skipChildren).error(function (data, status) { $notification.create("Unable to " + state + " service", data.Detail).error(); }); }; $scope.getEndpointType = function (endpoint) { return endpoint.VHostName ? "vhost" : "port"; }; // filters to be used when counting how many descendent // services will be affected by a state change var serviceStateChangeFilters = { // only stopped services will be started "start": service => service.desiredState === 0, // only started services will be stopped "stop": service => service.desiredState === 1, // only started services will be restarted "restart": service => service.desiredState === 1 }; // clicks to a service's start, stop, or restart // button should first determine if the service has // children and ask the user to choose to start all // children or only the top service $scope.clickRunning = function (service, state) { var filterFn = serviceStateChangeFilters[state]; var childCount = utils.countTheKids(service, filterFn); // if the service has affected children, check if the user // wants to start just the service, or the service and children if (childCount > 0) { $scope.modal_confirmSetServiceState(service, state, childCount); // if no children, just start the 
service } else { $scope.setServiceState(service, state); } }; // verifies if use wants to start parent service, or parent // and all children $scope.modal_confirmSetServiceState = function (service, state, childCount) { $modalService.create({ template: ["<h4>" + $translate.instant("choose_services_" + state) + "</h4><ul>", "<li>" + $translate.instant(state + "_service_name", { name: "<strong>" + service.name + "</strong>" }) + "</li>", "<li>" + $translate.instant(state + "_service_name_and_children", { name: "<strong>" + service.name + "</strong>", count: "<strong>" + childCount + "</strong>" }) + "</li></ul>" ].join(""), model: $scope, title: $translate.instant(state + "_service"), actions: [ { role: "cancel" }, { role: "ok", classes: " ", label: $translate.instant(state + "_service"), action: function () { // the arg here explicitly prevents child services // from being started $scope.setServiceState(service, state, true); this.close(); } }, { role: "ok", label: $translate.instant(state + "_service_and_children", { count: childCount }), action: function () { $scope.setServiceState(service, state); this.close(); } } ] }); }; $scope.clickEndpointEnable = function (publicEndpoint) { if ($scope.getEndpointType(publicEndpoint) === "vhost") { resourcesFactory.enableVHost(publicEndpoint.ApplicationId, publicEndpoint.Application, publicEndpoint.ServiceEndpoint, publicEndpoint.Name) .error((data, status) => { $notification.create("Enable Public Endpoint failed", data.Detail).error(); }); } else if ($scope.getEndpointType(publicEndpoint) === "port") { resourcesFactory.enablePort(publicEndpoint.ApplicationId, publicEndpoint.Application, publicEndpoint.ServiceEndpoint, publicEndpoint.PortAddr) .error((data, status) => { $notification.create("Enable Public Endpoint failed", data.Detail).error(); }); } }; $scope.clickEndpointDisable = function (publicEndpoint) { if ($scope.getEndpointType(publicEndpoint) === "vhost") { 
resourcesFactory.disableVHost(publicEndpoint.ApplicationId, publicEndpoint.Application, publicEndpoint.ServiceEndpoint, publicEndpoint.Name) .error((data, status) => { $notification.create("Disable Public Endpoint failed", data.Detail).error(); }); } else if ($scope.getEndpointType(publicEndpoint) === "port") { resourcesFactory.disablePort(publicEndpoint.ApplicationId, publicEndpoint.Application, publicEndpoint.ServiceEndpoint, publicEndpoint.PortAddr) .error((data, status) => { $notification.create("Disable Public Endpoint failed", data.Detail).error(); }); } }; $scope.clickEditContext = function (contextFileId) { //edit variables (context) of current service let modalScope = $scope.$new(true); resourcesFactory.v2.getServiceContext(contextFileId) .then(function (data) { //set editor options for context editing modalScope.codemirrorOpts = { lineNumbers: true, mode: "properties" }; // this is the text bound to the modal texarea modalScope.Context = makeEditableContext(data); // now that we have the text of the file, create modal dialog $modalService.create({ templateUrl: "edit-context.html", model: modalScope, title: $translate.instant("edit_context"), actions: [ { role: "cancel" }, { role: "ok", label: $translate.instant("btn_save"), action: function () { // disable ok button, and store the re-enable function let enableSubmit = this.disableSubmitButton(); let storableContext = makeStorableContext(modalScope.Context); resourcesFactory.v2.updateServiceContext(contextFileId, storableContext) .success(function (data, status) { $notification.create("Updated variables for", $scope.currentService.name).success(); this.close(); }.bind(this)) .error(function (data, status) { this.createNotification("Update variables failed", data.Detail).error(); enableSubmit(); }.bind(this)); } } ], onShow: function () { modalScope.codemirrorRefresh = true; }, onHide: function () { modalScope.codemirrorRefresh = false; } }); }); }; function makeEditableContext(context) { var 
editableContext = ""; for (var key in context) { editableContext += key + " " + context[key] + "\n"; } if (!editableContext) { editableContext = ""; } return editableContext; } function makeStorableContext(context) { //turn editableContext into a JSON object var lines = context.split("\n"), storable = {}; lines.forEach(function (line) { var delimitIndex, key, val; if (line !== "") { delimitIndex = line.indexOf(" "); if (delimitIndex !== -1) { key = line.substr(0, delimitIndex); val = line.substr(delimitIndex + 1); storable[key] = val; } else { context[line] = ""; } } }); return JSON.stringify(storable); } $scope.clickRemovePublicEndpoint = function (publicEndpoint) { $modalService.create({ template: $translate.instant("remove_public_endpoint") + ": <strong>" + (publicEndpoint.ServiceName ? publicEndpoint.ServiceName : "port " + publicEndpoint.PortAddress) + "</strong><br><br>", model: $scope, title: "remove_public_endpoint", actions: [ { role: "cancel" }, { role: "ok", label: "remove_public_endpoint_confirm", classes: "btn-danger", action: function () { if ($scope.getEndpointType(publicEndpoint) === "vhost") { resourcesFactory.removeVHost(publicEndpoint.ServiceID, publicEndpoint.Application, publicEndpoint.VHostName) .success(() => { // reload the table refreshEndpoints(); $notification.create("Removed Public Endpoint", publicEndpoint.Application).success(); }) .error((data, status) => { $notification.create("Remove Public Endpoint failed", data.Detail).error(); }); } else if ($scope.getEndpointType(publicEndpoint) === "port") { resourcesFactory.removePort(publicEndpoint.ServiceID, publicEndpoint.Application, publicEndpoint.PortAddress) .success(() => { // reload the table refreshEndpoints(); $notification.create("Removed Public Endpoint", publicEndpoint.PortName).success(); }) .error((data, status) => { $notification.create("Remove Public Endpoint failed", data.Detail).error(); }); } this.close(); } } ] }); }; $scope.editConfig = function (configFileId) { let 
modalScope = $scope.$new(true); // TODO - pop the modal up FIRST and show // a loading animation while the request is filled resourcesFactory.v2.getServiceConfig(configFileId) .then(function (data) { //set editor options for context editing modalScope.codemirrorOpts = { lineNumbers: true, mode: utils.getModeFromFilename(data.Filename) }; // this is the text bound to the modal texarea angular.extend(modalScope, data); // now that we have the text of the file, create modal dialog $modalService.create({ templateUrl: "edit-config.html", model: modalScope, title: $translate.instant("title_edit_config") + " - " + modalScope.Filename, bigModal: true, actions: [ { role: "cancel" }, { role: "ok", label: $translate.instant("btn_save"), action: function () { if (this.validate()) { // disable ok button, and store the re-enable function var enableSubmit = this.disableSubmitButton(); resourcesFactory.v2.updateServiceConfig(configFileId, modalScope) .success(function (data, status) { $notification.create("Updated configuation file", data.Filename).success(); this.close(); }.bind(this)) .error(function (data, status) { this.createNotification("Update configuration failed", data.Detail).error(); enableSubmit(); }.bind(this)); } } } ], validate: function () { // TODO - actually validate return true; }, onShow: function () { modalScope.codemirrorRefresh = true; }, onHide: function () { modalScope.codemirrorRefresh = false; } }); }); }; $scope.viewLog = function (instance) { let logScope = $scope.$new(true); resourcesFactory.getInstanceLogs(instance.model.ServiceID, instance.id) .success(function (log) { logScope.log = log.Detail; $modalService.create({ templateUrl: "view-log.html", model: logScope, title: "title_log", bigModal: true, actions: [ { role: "cancel", label: "close" }, { classes: "btn-primary", label: "refresh", icon: "glyphicon-repeat", action: function () { var textarea = this.$el.find("textarea"); resourcesFactory.getInstanceLogs(instance.model.ServiceID, instance.id) 
.success(function (log) { logScope.log = log.Detail; textarea.scrollTop(textarea[0].scrollHeight - textarea.height()); }) .error((data, status) => { this.createNotification("Unable to fetch logs", data.Detail).error(); }); } }, { classes: "btn-primary", label: "download", action: function () { utils.downloadFile('/services/' + instance.model.ServiceID + '/' + instance.id + '/logs/download'); }, icon: "glyphicon-download" } ], onShow: function () { var textarea = this.$el.find("textarea"); textarea.scrollTop(textarea[0].scrollHeight - textarea.height()); } }); }) .error(function (data, status) { $notification.create("Unable to fetch logs", data.Detail).error(); }); }; $scope.subNavClick = function (crumb) { if (crumb.id) { $scope.routeToService(crumb.id); } else { // TODO - just call subnavs usual function $location.path(crumb.url); } }; // grab default kibana search configs and adjust // the query to look for this specific service $scope.getServiceLogURL = function (service) { if (!service) { return ""; } let appConfig = LogSearch.getDefaultAppConfig(), globalConfig = LogSearch.getDefaultGlobalConfig(); appConfig.query = { query_string: { analyze_wildcard: true, query: `fields.service:${service.id} AND fields.instance:* AND message:*` } }; appConfig.columns = ["fields.instance", "message"]; return LogSearch.getURL(appConfig, globalConfig); }; // grab default kibana search configs and adjust // the query to look for this specific service instance $scope.getInstanceLogURL = function (instance) { let appConfig = LogSearch.getDefaultAppConfig(), globalConfig = LogSearch.getDefaultGlobalConfig(); appConfig.query = { query_string: { analyze_wildcard: true, query: `fields.service:${instance.model.ServiceID} AND fields.instance:${instance.model.InstanceID} AND message:*` } }; appConfig.columns = ["message"]; return LogSearch.getURL(appConfig, globalConfig); }; // updates URL and current service ID // which triggers UI update $scope.routeToService = function (id, e) { // if 
an event is present, we may // need to prevent it from performing // default navigation behavior if (e) { // ctrl click opens in new tab, // so allow that to happen and don't // bother routing the current view if (e.ctrlKey) { return; } // if middle click, don't update // current view if (e.button === 1) { return; } // otherwise, prevent default so // we can handle the view routing e.preventDefault(); } $location.update_path("/services/" + id, true); $scope.params.serviceId = id; }; // restart all running instances for this service $scope.killRunningInstances = function (app) { resourcesFactory.restartService(app.ID) .error((data, status) => { $notification.create("Stop Service failed", data.Detail).error(); }); }; $scope.getHostName = function (id) { if (hostsFactory.get(id)) { return hostsFactory.get(id).name; } else { // TODO - if unknown host, dont make linkable // and use custom css to show unknown return "unknown"; } }; // expand/collapse state of service tree nodes $scope.serviceTreeState = CCUIState.get($cookies.get("ZUsername"), "serviceTreeState"); // servicedTreeState is a collection of objects // describing if nodes in a service tree are hidden or collapsed. // It is first keyed by the id of the current service context (the // service who's name is at the top of the page), then keyed by // the service in question. eg: // // current service id // -> child service id // -> hidden // -> collapsed // -> child service id // -> hidden // -> collapsed // ... 
$scope.toggleChildren = function (service) { if (!$scope.currentService) { console.warn("Cannot store toggle state: no current service"); return; } if ($scope.currentTreeState[service.id].collapsed) { $scope.currentTreeState[service.id].collapsed = false; if (service.subservices.length) { $scope.showChildren(service); } else { service.fetchServiceChildren().then(() => { $scope.flattenServicesTree(); $scope.currentService.updateDescendentStatuses(); }); } } else { $scope.currentTreeState[service.id].collapsed = true; $scope.flattenServicesTree(); $scope.currentService.updateDescendentStatuses(); $scope.hideChildren(service); } }; $scope.getServiceEndpoints = function (id) { let deferred = $q.defer(); resourcesFactory.v2.getServiceEndpoints(id) .then(function (response) { console.log("got service endpoints for id " + id); deferred.resolve(response.data); }, function (response) { console.warn(response.status + " " + response.statusText); deferred.reject(response.statusText); }); return deferred.promise; }; $scope.hideChildren = function (service) { // get the state of the current service's tree var treeState = $scope.currentTreeState; if (service.subservices.length) { service.subservices.forEach(function (child) { treeState[child.id].hidden = true; $scope.hideChildren(child); }); } }; $scope.showChildren = function (service) { var treeState = $scope.currentTreeState; if (service.subservices.length) { service.subservices.forEach(function (child) { treeState[child.id].hidden = false; // if this child service is not marked // as collapsed, show its children if (!treeState[child.id].collapsed) { $scope.showChildren(child); } }); } }; //we need to bring this function into scope so we can use ng-hide if an object is empty $scope.isEmptyObject = function (obj) { return angular.equals({}, obj); }; $scope.isIsvc = function (service) { return service.isIsvc(); }; $scope.hasCurrentInstances = function () { return $scope.currentService && $scope.currentService.hasInstances(); }; 
$scope.editCurrentService = function () { // clone service for editing $scope.editableService = angular.copy($scope.currentService.model); $modalService.create({ templateUrl: "edit-service.html", model: $scope, title: "title_edit_service", actions: [ { role: "cancel" }, { role: "ok", label: "btn_save_changes", action: function () { if (this.validate()) { // disable ok button, and store the re-enable function var enableSubmit = this.disableSubmitButton(); // update service with recently edited service resourcesFactory.v2.updateService($scope.editableService.ID, $scope.editableService) .success(function (data, status) { $notification.create("Updated service", $scope.editableService.Name).success(); update(); this.close(); }.bind(this)) .error(function (data, status) { this.createNotification("Update service failed", data.Detail).error(); enableSubmit(); }.bind(this)); } } } ], validate: function () { if ($scope.editableService.InstanceLimits.Min > $scope.editableService.Instances || $scope.editableService.Instances === undefined) { return false; } return true; } }); }; // TODO - clean up magic numbers $scope.calculateIndent = function (depth) { let offset = 1; if ($scope.currentService && $scope.currentService.parent) { offset = $scope.currentService.parent.depth + 2; } return $scope.indent(depth - offset); }; $scope.setCurrentTreeState = function () { $scope.serviceTreeState[$scope.currentService.id] = {}; }; $scope.flattenServicesTree = function () { // flatten the current service's subservices tree. 
let rows = []; let treeState = $scope.currentTreeState; (function flatten(service, depth) { if (!treeState[service.id]) { treeState[service.id] = { collapsed: true, hidden: false }; } let rowState = treeState[service.id]; // TODO - merge rather than overwrite to avoid // causing the entire tree to bounce let rowItem = { service: service, depth: depth, collapsed: rowState.collapsed, hidden: rowState.hidden }; rows.push(rowItem); if (service.subservices.length) { $filter('orderBy')(service.subservices, 'name') .forEach(svc => flatten(svc, depth + 1)); } })($scope.currentService, 0); // rows[0] is always the top level service, so slice that off $scope.currentDescendents = rows.slice(1); }; $scope.fetchBreadcrumbs = function () { resourcesFactory.v2.getServiceAncestors($scope.currentService.id) .then(current => { $scope.breadcrumbs = makeCrumbs(current); }, error => { console.warn(error); }); }; // constructs a new current service $scope.setCurrentService = function () { $scope.currentService = undefined; resourcesFactory.v2.getService($scope.params.serviceId) .then(function (model) { $scope.currentService = new Service(model); $scope.currentDescendents = []; $scope.currentService.fetchServiceChildren() .then(() => { $scope.flattenServicesTree(); $scope.currentService.updateDescendentStatuses(); }); // sets $scope.breadcrumbs $scope.fetchBreadcrumbs(); // update serviceTreeState $scope.setCurrentTreeState(); // property for view to bind for tree state NOTE: WHA???? 
$scope.currentTreeState = $scope.serviceTreeState[$scope.currentService.id]; // fetchAll() will trigger update at completion $scope.currentService.fetchAll(true); // update fast-moving statuses $scope.currentService.fetchAllStates(); }); }; function refreshEndpoints() { $scope.currentService.fetchEndpoints(true); } function update() { // update service model data resourcesFactory.v2.getService($scope.params.serviceId) .then(function (model) { $scope.currentService.update(model); }); // update fast-moving statuses $scope.currentService.fetchAllStates(); $scope.currentService.updateDescendentStatuses(); } function init() { $scope.name = "servicedetails"; $scope.params = $routeParams; $scope.breadcrumbs = [ { label: 'breadcrumb_deployed', url: '/apps' } ]; $scope.publicEndpointsTable = { sorting: { ServiceEndpoint: "asc" } }; $scope.ipsTable = { sorting: { ServiceName: "asc" } }; $scope.configTable = { sorting: { Filename: "asc" } }; $scope.instancesTable = { sorting: { "model.InstanceID": "asc" }, // instead of watching for a change, always // reload at a specified interval watchExpression: (function () { var last = new Date().getTime(), now, interval = 1000; return function () { now = new Date().getTime(); if (now - last > interval) { last = now; return now; } }; })() }; // servicesTable should not be sortable since it // is a hierarchy. 
$scope.servicesTable = { disablePagination: true }; $scope.ips = {}; // pools are needed for edit service dialog $scope.pools = poolsFactory.poolList; // if the current service changes, update // various service controller thingies $scope.$watch("params.serviceId", $scope.setCurrentService); hostsFactory.activate(); hostsFactory.update(); // TODO - use UI_POLL_INTERVAL let intervalVal = setInterval(function () { if ($scope.currentService) { $scope.currentService.fetchAllStates(); $scope.currentService.updateDescendentStatuses(); } }, 3000); poolsFactory.activate(); poolsFactory.update(); $scope.$on("$destroy", function () { clearInterval(intervalVal); // servicesFactory.deactivate(); hostsFactory.deactivate(); poolsFactory.deactivate(); }); } // kick off controller init(); function makeCrumbs(current) { var crumbs = [{ label: current.Name, itemClass: "active", id: current.ID }]; (function recurse(service) { if (service) { crumbs.unshift({ label: service.Name, url: "/services/" + service.ID, id: service.ID }); recurse(service.Parent); } })(current.Parent); crumbs.unshift({ label: "Applications", url: "/apps" }); return crumbs; } }]); })();
web/ui/src/Services/ServiceDetailsController.js
/* globals controlplane: true */ /* ServiceDetailsController * Displays details of a specific service */ (function () { 'use strict'; // share angular services outside of angular context let $notification, serviceHealth, $q, resourcesFactory, utils; controlplane.controller("ServiceDetailsController", [ "$scope", "$q", "$routeParams", "$location", "resourcesFactory", "authService", "$modalService", "$translate", "$notification", "$timeout", "miscUtils", "hostsFactory", "$serviceHealth", "Service", "poolsFactory", "CCUIState", "$cookies", "areUIReady", "LogSearch", "$filter", function ($scope, _$q, $routeParams, $location, _resourcesFactory, authService, $modalService, $translate, _$notification, $timeout, _utils, hostsFactory, _serviceHealth, Service, poolsFactory, CCUIState, $cookies, areUIReady, LogSearch, $filter) { // api access via angular context $notification = _$notification; serviceHealth = _serviceHealth; $q = _$q; resourcesFactory = _resourcesFactory; utils = _utils; // Ensure logged in authService.checkLogin($scope); $scope.hostsFactory = hostsFactory; $scope.defaultHostAlias = $location.host(); if (utils.needsHostAlias($location.host())) { resourcesFactory.getHostAlias().success(function (data) { $scope.defaultHostAlias = data.hostalias; }); } //add Public Endpoint data $scope.publicEndpoints = { add: {} }; //add service endpoint data $scope.exportedServiceEndpoints = {}; $scope.clickPool = function (id) { resourcesFactory.routeToPool(id); }; $scope.clickHost = function (id) { resourcesFactory.routeToHost(id); }; $scope.modalAddPublicEndpoint = function () { areUIReady.lock(); $scope.protocols = []; $scope.protocols.push({ Label: "HTTPS", UseTLS: true, Protocol: "https" }); $scope.protocols.push({ Label: "HTTP", UseTLS: false, Protocol: "http" }); $scope.protocols.push({ Label: "Other, secure (TLS)", UseTLS: true, Protocol: "" }); $scope.protocols.push({ Label: "Other, non-secure", UseTLS: false, Protocol: "" }); // default public endpoint options 
$scope.publicEndpoints.add = { type: "port", endpoint: $scope.currentService.exportedServiceEndpoints[0], name: "", host: $scope.defaultHostAlias, port: "", protocol: $scope.protocols[0], }; // returns an error string if newPublicEndpoint's vhost is invalid var validateVHost = function (newPublicEndpoint) { var name = newPublicEndpoint.name; // if no port if (!name || !name.length) { return "Missing Name"; } // if name already exists for (var i in $scope.publicEndpoints.data) { if (name === $scope.publicEndpoints.data[i].Name) { return "Name already exists: " + newPublicEndpoint.name; } } // if invalid characters var re = /^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$/; if (!re.test(name)) { return $translate.instant("vhost_name_invalid") + " " + newPublicEndpoint.name; } }; // returns an error string if newPublicEndpoint's port is invalid var validatePort = function (newPublicEndpoint) { var host = newPublicEndpoint.host; var port = newPublicEndpoint.port; if (!host || !host.length) { return "Missing host name"; } // if invalid characters var re = /^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$/; if (!re.test(host)) { return $translate.instant("host_name_invalid") + ": " + host; } // if no port if (!port || !port.length) { return "Missing port"; } if (+port < 1 || +port > 65536) { return "Port must be between 1 and 65536"; } }; $modalService.create({ templateUrl: "add-public-endpoint.html", model: $scope, title: "add_public_endpoint", actions: [ { role: "cancel", action: function () { this.close(); } }, { role: "ok", label: "add_public_endpoint_confirm", action: function () { var newPublicEndpoint = $scope.publicEndpoints.add; if (this.validate(newPublicEndpoint)) { // disable ok button, and store the re-enable function var enableSubmit = this.disableSubmitButton(); $scope.addPublicEndpoint(newPublicEndpoint) .success(function (data, status) { 
// reload the table refreshEndpoints(); $notification.create("Added public endpoint").success(); this.close(); }.bind(this)) .error(function (data, status) { this.createNotification("Unable to add public endpoint", data.Detail).error(); enableSubmit(); }.bind(this)); } } } ], validate: function (newPublicEndpoint) { // if no service endpoint selected if (!newPublicEndpoint.endpoint) { this.createNotification("Unable to add Public Endpoint", "No service endpoint selected").error(); return false; } // perform type specific validation if (newPublicEndpoint.type === "vhost") { var err = validateVHost(newPublicEndpoint); if (err) { this.createNotification("Unable to add Public Endpoint", err).error(); } else { return true; } } else if (newPublicEndpoint.type === "port") { var err = validatePort(newPublicEndpoint); if (err) { this.createNotification("Unable to add Public Endpoint", err).error(); return false; } else { return true; } } }, onShow: () => { areUIReady.unlock(); } }); }; $scope.addPublicEndpoint = function (newPublicEndpoint) { var serviceId = newPublicEndpoint.endpoint.ServiceID; var serviceName = newPublicEndpoint.endpoint.ServiceName; var serviceEndpoint = newPublicEndpoint.endpoint.Application; if (newPublicEndpoint.type === "vhost") { var vhostName = newPublicEndpoint.name; return resourcesFactory.addVHost(serviceId, serviceName, serviceEndpoint, vhostName); } else if (newPublicEndpoint.type === "port") { var port = newPublicEndpoint.host + ":" + newPublicEndpoint.port; var usetls = newPublicEndpoint.protocol.UseTLS; var protocol = newPublicEndpoint.protocol.Protocol; return resourcesFactory.addPort(serviceId, serviceName, serviceEndpoint, port, usetls, protocol); } }; // modalAssignIP opens a modal view to assign an ip address to a service $scope.modalAssignIP = function (ip, poolID) { let modalScope = $scope.$new(true); modalScope.assignments = { 'ip': ip, 'value': null }; resourcesFactory.getPoolIPs(poolID) .success(function (data) { let options = [{ 
'Value': 'Automatic', 'IPAddr': '' }]; let i, IPAddr, value; //host ips if (data && data.HostIPs) { for (i = 0; i < data.HostIPs.length; ++i) { IPAddr = data.HostIPs[i].IPAddress; value = 'Host: ' + IPAddr + ' - ' + data.HostIPs[i].InterfaceName; options.push({ 'Value': value, 'IPAddr': IPAddr }); // TODO: look up associated endpoint name //modalScope.assignments.ip.EndpointName = "Boogity"; // set the default value to the currently assigned value if (modalScope.assignments.ip.IPAddress === IPAddr) { modalScope.assignments.value = options[options.length - 1]; } } } //virtual ips if (data && data.VirtualIPs) { for (i = 0; i < data.VirtualIPs.length; ++i) { IPAddr = data.VirtualIPs[i].IP; value = "Virtual IP: " + IPAddr; options.push({ 'Value': value, 'IPAddr': IPAddr }); // set the default value to the currently assigned value if (modalScope.assignments.ip.IPAddr === IPAddr) { modalScope.assignments.value = options[options.length - 1]; } } } //default to automatic if necessary if (!modalScope.assignments.value) { modalScope.assignments.value = options[0]; } modalScope.assignments.options = options; $modalService.create({ templateUrl: "assign-ip.html", model: modalScope, title: "assign_ip", actions: [ { role: "cancel" }, { role: "ok", label: "assign_ip", action: function () { if (this.validate()) { // disable ok button, and store the re-enable function var enableSubmit = this.disableSubmitButton(); var serviceID = modalScope.assignments.ip.ServiceID; var IP = modalScope.assignments.value.IPAddr; resourcesFactory.assignIP(serviceID, IP) .success(function (data, status) { $notification.create("Added IP", data.Detail).success(); update(); this.close(); }.bind(this)) .error(function (data, status) { this.createNotification("Unable to Assign IP", data.Detail).error(); enableSubmit(); }.bind(this)); } } } ] }); }) .error((data, status) => { $notification.create("Unable to retrieve IPs", data.Detail).error(); }); }; $scope.anyServicesExported = function (service) { 
if($scope.currentService && $scope.currentService.exportedServiceEndpoints){ return $scope.currentService.exportedServiceEndpoints.length > 0; } else { return false; } }; $scope.publicEndpointProtocol = function (publicEndpoint) { if ($scope.getEndpointType(publicEndpoint) === "vhost") { return "https"; } else { if (publicEndpoint.Protocol !== "") { return publicEndpoint.Protocol; } if (publicEndpoint.UseTLS) { return "other (TLS)"; } return "other"; } }; $scope.indent = function (depth) { return { 'padding-left': (24 * depth) + "px" }; }; // sets a service to start, stop or restart state $scope.setServiceState = function (service, state, skipChildren) { // service[state]() ends up translating to something like // service.start() or service.stop() service[state](skipChildren).error(function (data, status) { $notification.create("Unable to " + state + " service", data.Detail).error(); }); }; $scope.getEndpointType = function (endpoint) { return endpoint.VHostName ? "vhost" : "port"; }; // filters to be used when counting how many descendent // services will be affected by a state change var serviceStateChangeFilters = { // only stopped services will be started "start": service => service.desiredState === 0, // only started services will be stopped "stop": service => service.desiredState === 1, // only started services will be restarted "restart": service => service.desiredState === 1 }; // clicks to a service's start, stop, or restart // button should first determine if the service has // children and ask the user to choose to start all // children or only the top service $scope.clickRunning = function (service, state) { var filterFn = serviceStateChangeFilters[state]; var childCount = utils.countTheKids(service, filterFn); // if the service has affected children, check if the user // wants to start just the service, or the service and children if (childCount > 0) { $scope.modal_confirmSetServiceState(service, state, childCount); // if no children, just start the 
service } else { $scope.setServiceState(service, state); } }; // verifies if use wants to start parent service, or parent // and all children $scope.modal_confirmSetServiceState = function (service, state, childCount) { $modalService.create({ template: ["<h4>" + $translate.instant("choose_services_" + state) + "</h4><ul>", "<li>" + $translate.instant(state + "_service_name", { name: "<strong>" + service.name + "</strong>" }) + "</li>", "<li>" + $translate.instant(state + "_service_name_and_children", { name: "<strong>" + service.name + "</strong>", count: "<strong>" + childCount + "</strong>" }) + "</li></ul>" ].join(""), model: $scope, title: $translate.instant(state + "_service"), actions: [ { role: "cancel" }, { role: "ok", classes: " ", label: $translate.instant(state + "_service"), action: function () { // the arg here explicitly prevents child services // from being started $scope.setServiceState(service, state, true); this.close(); } }, { role: "ok", label: $translate.instant(state + "_service_and_children", { count: childCount }), action: function () { $scope.setServiceState(service, state); this.close(); } } ] }); }; $scope.clickEndpointEnable = function (publicEndpoint) { if ($scope.getEndpointType(publicEndpoint) === "vhost") { resourcesFactory.enableVHost(publicEndpoint.ApplicationId, newPublicEndpoint.Application, newPublicEndpoint.ServiceEndpoint, newPublicEndpoint.Name) .error((data, status) => { $notification.create("Enable Public Endpoint failed", data.Detail).error(); }); } else if ($scope.getEndpointType(publicEndpoint) === "port") { resourcesFactory.enablePort(publicEndpoint.ApplicationId, newPublicEndpoint.Application, newPublicEndpoint.ServiceEndpoint, newPublicEndpoint.PortAddr) .error((data, status) => { $notification.create("Enable Public Endpoint failed", data.Detail).error(); }); } }; $scope.clickEndpointDisable = function (publicEndpoint) { if ($scope.getEndpointType(publicEndpoint) === "vhost") { 
resourcesFactory.disableVHost(publicEndpoint.ApplicationId, publicEndpoint.Application, publicEndpoint.ServiceEndpoint, publicEndpoint.Name) .error((data, status) => { $notification.create("Disable Public Endpoint failed", data.Detail).error(); }); } else if ($scope.getEndpointType(publicEndpoint) === "port") { resourcesFactory.disablePort(publicEndpoint.ApplicationId, publicEndpoint.Application, publicEndpoint.ServiceEndpoint, publicEndpoint.PortAddr) .error((data, status) => { $notification.create("Disable Public Endpoint failed", data.Detail).error(); }); } }; $scope.clickEditContext = function (contextFileId) { //edit variables (context) of current service let modalScope = $scope.$new(true); resourcesFactory.v2.getServiceContext(contextFileId) .then(function (data) { //set editor options for context editing modalScope.codemirrorOpts = { lineNumbers: true, mode: "properties" }; // this is the text bound to the modal texarea modalScope.Context = makeEditableContext(data); // now that we have the text of the file, create modal dialog $modalService.create({ templateUrl: "edit-context.html", model: modalScope, title: $translate.instant("edit_context"), actions: [ { role: "cancel" }, { role: "ok", label: $translate.instant("btn_save"), action: function () { // disable ok button, and store the re-enable function let enableSubmit = this.disableSubmitButton(); let storableContext = makeStorableContext(modalScope.Context); resourcesFactory.v2.updateServiceContext(contextFileId, storableContext) .success(function (data, status) { $notification.create("Updated variables for", $scope.currentService.name).success(); this.close(); }.bind(this)) .error(function (data, status) { this.createNotification("Update variables failed", data.Detail).error(); enableSubmit(); }.bind(this)); } } ], onShow: function () { modalScope.codemirrorRefresh = true; }, onHide: function () { modalScope.codemirrorRefresh = false; } }); }); }; function makeEditableContext(context) { var 
editableContext = ""; for (var key in context) { editableContext += key + " " + context[key] + "\n"; } if (!editableContext) { editableContext = ""; } return editableContext; } function makeStorableContext(context) { //turn editableContext into a JSON object var lines = context.split("\n"), storable = {}; lines.forEach(function (line) { var delimitIndex, key, val; if (line !== "") { delimitIndex = line.indexOf(" "); if (delimitIndex !== -1) { key = line.substr(0, delimitIndex); val = line.substr(delimitIndex + 1); storable[key] = val; } else { context[line] = ""; } } }); return JSON.stringify(storable); } $scope.clickRemovePublicEndpoint = function (publicEndpoint) { $modalService.create({ template: $translate.instant("remove_public_endpoint") + ": <strong>" + (publicEndpoint.ServiceName ? publicEndpoint.ServiceName : "port " + publicEndpoint.PortAddress) + "</strong><br><br>", model: $scope, title: "remove_public_endpoint", actions: [ { role: "cancel" }, { role: "ok", label: "remove_public_endpoint_confirm", classes: "btn-danger", action: function () { if ($scope.getEndpointType(publicEndpoint) === "vhost") { resourcesFactory.removeVHost(publicEndpoint.ServiceID, publicEndpoint.Application, publicEndpoint.VHostName) .success(() => { // reload the table refreshEndpoints(); $notification.create("Removed Public Endpoint", publicEndpoint.Application).success(); }) .error((data, status) => { $notification.create("Remove Public Endpoint failed", data.Detail).error(); }); } else if ($scope.getEndpointType(publicEndpoint) === "port") { resourcesFactory.removePort(publicEndpoint.ServiceID, publicEndpoint.Application, publicEndpoint.PortAddress) .success(() => { // reload the table refreshEndpoints(); $notification.create("Removed Public Endpoint", publicEndpoint.PortName).success(); }) .error((data, status) => { $notification.create("Remove Public Endpoint failed", data.Detail).error(); }); } this.close(); } } ] }); }; $scope.editConfig = function (configFileId) { let 
modalScope = $scope.$new(true); // TODO - pop the modal up FIRST and show // a loading animation while the request is filled resourcesFactory.v2.getServiceConfig(configFileId) .then(function (data) { //set editor options for context editing modalScope.codemirrorOpts = { lineNumbers: true, mode: utils.getModeFromFilename(data.Filename) }; // this is the text bound to the modal texarea angular.extend(modalScope, data); // now that we have the text of the file, create modal dialog $modalService.create({ templateUrl: "edit-config.html", model: modalScope, title: $translate.instant("title_edit_config") + " - " + modalScope.Filename, bigModal: true, actions: [ { role: "cancel" }, { role: "ok", label: $translate.instant("btn_save"), action: function () { if (this.validate()) { // disable ok button, and store the re-enable function var enableSubmit = this.disableSubmitButton(); resourcesFactory.v2.updateServiceConfig(configFileId, modalScope) .success(function (data, status) { $notification.create("Updated configuation file", data.Filename).success(); this.close(); }.bind(this)) .error(function (data, status) { this.createNotification("Update configuration failed", data.Detail).error(); enableSubmit(); }.bind(this)); } } } ], validate: function () { // TODO - actually validate return true; }, onShow: function () { modalScope.codemirrorRefresh = true; }, onHide: function () { modalScope.codemirrorRefresh = false; } }); }); }; $scope.viewLog = function (instance) { let logScope = $scope.$new(true); resourcesFactory.getInstanceLogs(instance.model.ServiceID, instance.id) .success(function (log) { logScope.log = log.Detail; $modalService.create({ templateUrl: "view-log.html", model: logScope, title: "title_log", bigModal: true, actions: [ { role: "cancel", label: "close" }, { classes: "btn-primary", label: "refresh", icon: "glyphicon-repeat", action: function () { var textarea = this.$el.find("textarea"); resourcesFactory.getInstanceLogs(instance.model.ServiceID, instance.id) 
.success(function (log) { logScope.log = log.Detail; textarea.scrollTop(textarea[0].scrollHeight - textarea.height()); }) .error((data, status) => { this.createNotification("Unable to fetch logs", data.Detail).error(); }); } }, { classes: "btn-primary", label: "download", action: function () { utils.downloadFile('/services/' + instance.model.ServiceID + '/' + instance.id + '/logs/download'); }, icon: "glyphicon-download" } ], onShow: function () { var textarea = this.$el.find("textarea"); textarea.scrollTop(textarea[0].scrollHeight - textarea.height()); } }); }) .error(function (data, status) { $notification.create("Unable to fetch logs", data.Detail).error(); }); }; $scope.subNavClick = function (crumb) { if (crumb.id) { $scope.routeToService(crumb.id); } else { // TODO - just call subnavs usual function $location.path(crumb.url); } }; // grab default kibana search configs and adjust // the query to look for this specific service $scope.getServiceLogURL = function (service) { if (!service) { return ""; } let appConfig = LogSearch.getDefaultAppConfig(), globalConfig = LogSearch.getDefaultGlobalConfig(); appConfig.query = { query_string: { analyze_wildcard: true, query: `fields.service:${service.id} AND fields.instance:* AND message:*` } }; appConfig.columns = ["fields.instance", "message"]; return LogSearch.getURL(appConfig, globalConfig); }; // grab default kibana search configs and adjust // the query to look for this specific service instance $scope.getInstanceLogURL = function (instance) { let appConfig = LogSearch.getDefaultAppConfig(), globalConfig = LogSearch.getDefaultGlobalConfig(); appConfig.query = { query_string: { analyze_wildcard: true, query: `fields.service:${instance.model.ServiceID} AND fields.instance:${instance.model.InstanceID} AND message:*` } }; appConfig.columns = ["message"]; return LogSearch.getURL(appConfig, globalConfig); }; // updates URL and current service ID // which triggers UI update $scope.routeToService = function (id, e) { // if 
an event is present, we may // need to prevent it from performing // default navigation behavior if (e) { // ctrl click opens in new tab, // so allow that to happen and don't // bother routing the current view if (e.ctrlKey) { return; } // if middle click, don't update // current view if (e.button === 1) { return; } // otherwise, prevent default so // we can handle the view routing e.preventDefault(); } $location.update_path("/services/" + id, true); $scope.params.serviceId = id; }; // restart all running instances for this service $scope.killRunningInstances = function (app) { resourcesFactory.restartService(app.ID) .error((data, status) => { $notification.create("Stop Service failed", data.Detail).error(); }); }; $scope.getHostName = function (id) { if (hostsFactory.get(id)) { return hostsFactory.get(id).name; } else { // TODO - if unknown host, dont make linkable // and use custom css to show unknown return "unknown"; } }; // expand/collapse state of service tree nodes $scope.serviceTreeState = CCUIState.get($cookies.get("ZUsername"), "serviceTreeState"); // servicedTreeState is a collection of objects // describing if nodes in a service tree are hidden or collapsed. // It is first keyed by the id of the current service context (the // service who's name is at the top of the page), then keyed by // the service in question. eg: // // current service id // -> child service id // -> hidden // -> collapsed // -> child service id // -> hidden // -> collapsed // ... 
$scope.toggleChildren = function (service) { if (!$scope.currentService) { console.warn("Cannot store toggle state: no current service"); return; } if ($scope.currentTreeState[service.id].collapsed) { $scope.currentTreeState[service.id].collapsed = false; if (service.subservices.length) { $scope.showChildren(service); } else { service.fetchServiceChildren().then(() => { $scope.flattenServicesTree(); $scope.currentService.updateDescendentStatuses(); }); } } else { $scope.currentTreeState[service.id].collapsed = true; $scope.flattenServicesTree(); $scope.currentService.updateDescendentStatuses(); $scope.hideChildren(service); } }; $scope.getServiceEndpoints = function (id) { let deferred = $q.defer(); resourcesFactory.v2.getServiceEndpoints(id) .then(function (response) { console.log("got service endpoints for id " + id); deferred.resolve(response.data); }, function (response) { console.warn(response.status + " " + response.statusText); deferred.reject(response.statusText); }); return deferred.promise; }; $scope.hideChildren = function (service) { // get the state of the current service's tree var treeState = $scope.currentTreeState; if (service.subservices.length) { service.subservices.forEach(function (child) { treeState[child.id].hidden = true; $scope.hideChildren(child); }); } }; $scope.showChildren = function (service) { var treeState = $scope.currentTreeState; if (service.subservices.length) { service.subservices.forEach(function (child) { treeState[child.id].hidden = false; // if this child service is not marked // as collapsed, show its children if (!treeState[child.id].collapsed) { $scope.showChildren(child); } }); } }; //we need to bring this function into scope so we can use ng-hide if an object is empty $scope.isEmptyObject = function (obj) { return angular.equals({}, obj); }; $scope.isIsvc = function (service) { return service.isIsvc(); }; $scope.hasCurrentInstances = function () { return $scope.currentService && $scope.currentService.hasInstances(); }; 
$scope.editCurrentService = function () { // clone service for editing $scope.editableService = angular.copy($scope.currentService.model); $modalService.create({ templateUrl: "edit-service.html", model: $scope, title: "title_edit_service", actions: [ { role: "cancel" }, { role: "ok", label: "btn_save_changes", action: function () { if (this.validate()) { // disable ok button, and store the re-enable function var enableSubmit = this.disableSubmitButton(); // update service with recently edited service resourcesFactory.v2.updateService($scope.editableService.ID, $scope.editableService) .success(function (data, status) { $notification.create("Updated service", $scope.editableService.Name).success(); update(); this.close(); }.bind(this)) .error(function (data, status) { this.createNotification("Update service failed", data.Detail).error(); enableSubmit(); }.bind(this)); } } } ], validate: function () { if ($scope.editableService.InstanceLimits.Min > $scope.editableService.Instances || $scope.editableService.Instances === undefined) { return false; } return true; } }); }; // TODO - clean up magic numbers $scope.calculateIndent = function (depth) { let offset = 1; if ($scope.currentService && $scope.currentService.parent) { offset = $scope.currentService.parent.depth + 2; } return $scope.indent(depth - offset); }; $scope.setCurrentTreeState = function () { $scope.serviceTreeState[$scope.currentService.id] = {}; }; $scope.flattenServicesTree = function () { // flatten the current service's subservices tree. 
let rows = []; let treeState = $scope.currentTreeState; (function flatten(service, depth) { if (!treeState[service.id]) { treeState[service.id] = { collapsed: true, hidden: false }; } let rowState = treeState[service.id]; // TODO - merge rather than overwrite to avoid // causing the entire tree to bounce let rowItem = { service: service, depth: depth, collapsed: rowState.collapsed, hidden: rowState.hidden }; rows.push(rowItem); if (service.subservices.length) { $filter('orderBy')(service.subservices, 'name') .forEach(svc => flatten(svc, depth + 1)); } })($scope.currentService, 0); // rows[0] is always the top level service, so slice that off $scope.currentDescendents = rows.slice(1); }; $scope.fetchBreadcrumbs = function () { resourcesFactory.v2.getServiceAncestors($scope.currentService.id) .then(current => { $scope.breadcrumbs = makeCrumbs(current); }, error => { console.warn(error); }); }; // constructs a new current service $scope.setCurrentService = function () { $scope.currentService = undefined; resourcesFactory.v2.getService($scope.params.serviceId) .then(function (model) { $scope.currentService = new Service(model); $scope.currentDescendents = []; $scope.currentService.fetchServiceChildren() .then(() => { $scope.flattenServicesTree(); $scope.currentService.updateDescendentStatuses(); }); // sets $scope.breadcrumbs $scope.fetchBreadcrumbs(); // update serviceTreeState $scope.setCurrentTreeState(); // property for view to bind for tree state NOTE: WHA???? 
$scope.currentTreeState = $scope.serviceTreeState[$scope.currentService.id]; // fetchAll() will trigger update at completion $scope.currentService.fetchAll(true); // update fast-moving statuses $scope.currentService.fetchAllStates(); }); }; function refreshEndpoints() { $scope.currentService.fetchEndpoints(true); } function update() { // update service model data resourcesFactory.v2.getService($scope.params.serviceId) .then(function (model) { $scope.currentService.update(model); }); // update fast-moving statuses $scope.currentService.fetchAllStates(); $scope.currentService.updateDescendentStatuses(); } function init() { $scope.name = "servicedetails"; $scope.params = $routeParams; $scope.breadcrumbs = [ { label: 'breadcrumb_deployed', url: '/apps' } ]; $scope.publicEndpointsTable = { sorting: { ServiceEndpoint: "asc" } }; $scope.ipsTable = { sorting: { ServiceName: "asc" } }; $scope.configTable = { sorting: { Filename: "asc" } }; $scope.instancesTable = { sorting: { "model.InstanceID": "asc" }, // instead of watching for a change, always // reload at a specified interval watchExpression: (function () { var last = new Date().getTime(), now, interval = 1000; return function () { now = new Date().getTime(); if (now - last > interval) { last = now; return now; } }; })() }; // servicesTable should not be sortable since it // is a hierarchy. 
$scope.servicesTable = { disablePagination: true }; $scope.ips = {}; // pools are needed for edit service dialog $scope.pools = poolsFactory.poolList; // if the current service changes, update // various service controller thingies $scope.$watch("params.serviceId", $scope.setCurrentService); hostsFactory.activate(); hostsFactory.update(); // TODO - use UI_POLL_INTERVAL let intervalVal = setInterval(function () { if ($scope.currentService) { $scope.currentService.fetchAllStates(); $scope.currentService.updateDescendentStatuses(); } }, 3000); poolsFactory.activate(); poolsFactory.update(); $scope.$on("$destroy", function () { clearInterval(intervalVal); // servicesFactory.deactivate(); hostsFactory.deactivate(); poolsFactory.deactivate(); }); } // kick off controller init(); function makeCrumbs(current) { var crumbs = [{ label: current.Name, itemClass: "active", id: current.ID }]; (function recurse(service) { if (service) { crumbs.unshift({ label: service.Name, url: "/services/" + service.ID, id: service.ID }); recurse(service.Parent); } })(current.Parent); crumbs.unshift({ label: "Applications", url: "/apps" }); return crumbs; } }]); })();
CC-2774 Cannot remove public endpoint Additional changes for enable/disable endpoint.
web/ui/src/Services/ServiceDetailsController.js
CC-2774 Cannot remove public endpoint
<ide><path>eb/ui/src/Services/ServiceDetailsController.js <ide> <ide> $scope.clickEndpointEnable = function (publicEndpoint) { <ide> if ($scope.getEndpointType(publicEndpoint) === "vhost") { <del> resourcesFactory.enableVHost(publicEndpoint.ApplicationId, newPublicEndpoint.Application, newPublicEndpoint.ServiceEndpoint, newPublicEndpoint.Name) <add> resourcesFactory.enableVHost(publicEndpoint.ApplicationId, publicEndpoint.Application, publicEndpoint.ServiceEndpoint, publicEndpoint.Name) <ide> .error((data, status) => { <ide> $notification.create("Enable Public Endpoint failed", data.Detail).error(); <ide> }); <ide> } else if ($scope.getEndpointType(publicEndpoint) === "port") { <del> resourcesFactory.enablePort(publicEndpoint.ApplicationId, newPublicEndpoint.Application, newPublicEndpoint.ServiceEndpoint, newPublicEndpoint.PortAddr) <add> resourcesFactory.enablePort(publicEndpoint.ApplicationId, publicEndpoint.Application, publicEndpoint.ServiceEndpoint, publicEndpoint.PortAddr) <ide> .error((data, status) => { <ide> $notification.create("Enable Public Endpoint failed", data.Detail).error(); <ide> });
Java
apache-2.0
d724081dae8332f28e2b899191cd8509957b98cf
0
remkop/picocli,remkop/picocli,remkop/picocli,remkop/picocli
/* Copyright 2017 Remko Popma Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package picocli; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.Writer; import java.lang.reflect.Field; import java.net.InetAddress; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import picocli.CommandLine.Command; import picocli.CommandLine.Option; import picocli.CommandLine.Parameters; import static java.lang.String.*; /** * Generates a bash auto-complete script. */ public class AutoComplete { private AutoComplete() { } public static void main(String[] args) { CommandLine.run(new App(), System.err, args); } @Command(name = "picocli.AutoComplete", description = "") private static class App implements Runnable { @Option(names = {"-n", "--name"}, description = "Name of the command to create a completion script for. " + "When omitted, the annotated class @Command 'name' attribute is used. " + "If no @Command 'name' attribute exists, '<CLASS-SIMPLE-NAME>' (in lower-case) is used.") String commandName; @Option(names = {"-o", "--completionScript"}, description = "Name of the completion script file to generate. 
" + "When omitted, a file named '<commandName>_completion' " + "is generated in the current directory.") File autoCompleteScript; @Option(names = {"-w", "--writeCommandScript"}, description = "Write a '<commandName>' sample command script to the same directory " + "as the completion script.") boolean writeCommandScript; @Parameters(arity = "1", description = "Fully qualified class name of the annotated " + "@Command class to generate a completion script for.") String commandLineFQCN; @Option(names = {"-f", "--force"}, description = "Overwrite existing script files.") boolean overwriteIfExists; @Override public void run() { try { Class<?> cls = Class.forName(commandLineFQCN); CommandLine commandLine = new CommandLine(cls.newInstance()); if (commandName == null) { commandName = new CommandLine.Help(commandLine.getCommand()).commandName; if (CommandLine.Help.DEFAULT_COMMAND_NAME.equals(commandName)) { commandName = cls.getSimpleName().toLowerCase(); } } if (autoCompleteScript == null) { autoCompleteScript = new File(commandName + "_completion"); } File commandScript = null; if (writeCommandScript) { commandScript = new File(autoCompleteScript.getAbsoluteFile().getParentFile(), commandName); } if (commandScript != null && !overwriteIfExists && checkExists(commandScript)) { return; } if (!overwriteIfExists && checkExists(autoCompleteScript)) { return; } AutoComplete.bash(commandName, autoCompleteScript, commandScript, commandLine); } catch (Exception ex) { ex.printStackTrace(); } } private boolean checkExists(final File file) { if (file.exists()) { System.err.println(file.getAbsolutePath() + " exists. Specify -f to overwrite."); CommandLine.usage(this, System.err); return true; } return false; } } private static interface Function<T, V> { V apply(T t); } /** * Drops all characters that are not valid for bash function and identifier names. 
*/ private static class Bashify implements Function<CharSequence, String> { public String apply(CharSequence value) { return bashify(value); } } private static String bashify(CharSequence value) { StringBuilder builder = new StringBuilder(); for (int i = 0; i < value.length(); i++) { char c = value.charAt(i); if (Character.isLetterOrDigit(c) || c == '_') { builder.append(c); } else if (Character.isSpaceChar(c)) { builder.append('_'); } } return builder.toString(); } private static class EnumNameFunction implements Function<Enum<?>, String> { @Override public String apply(final Enum<?> anEnum) { return anEnum.name(); } } private static class NullFunction implements Function<CharSequence, String> { @Override public String apply(CharSequence value) { return value.toString(); } } private static interface Predicate<T> { boolean test(T t); } private static class BooleanFieldFilter implements Predicate<Field> { @Override public boolean test(Field f) { return f.getType() == Boolean.TYPE || f.getType() == Boolean.class; } } private static class EnumFieldFilter implements Predicate<Field> { @Override public boolean test(Field f) { return f.getType().isEnum(); } } private static <T> Predicate<T> negate(final Predicate<T> original) { return new Predicate<T>() { @Override public boolean test(T t) { return !original.test(t); } }; } private static <T> List<T> filter(List<T> list, Predicate<T> filter) { List<T> result = new ArrayList<T>(); for (T t : list) { if (filter.test(t)) { result.add(t); } } return result; } private static class CommandDescriptor { final String functionName; final String commandName; CommandDescriptor(String functionName, String commandName) { this.functionName = functionName; this.commandName = commandName; } public int hashCode() { return functionName.hashCode() * 37 + commandName.hashCode(); } public boolean equals(Object obj) { if (!(obj instanceof CommandDescriptor)) { return false; } if (obj == this) { return true; } CommandDescriptor other = 
(CommandDescriptor) obj; return other.functionName.equals(functionName) && other.commandName.equals(commandName); } } private static final String HEADER = "" + "#!/usr/bin/env bash\n" + "#\n" + "# %1$s Bash Completion\n" + "# =======================\n" + "#\n" + "# Bash completion support for the `%1$s` command,\n" + "# generated by [picocli](http://picocli.info/).\n" + "#\n" + "# Installation\n" + "# ------------\n" + "#\n" + "# 1. Place this file in a `bash-completion.d` folder:\n" + "#\n" + "# * /etc/bash-completion.d\n" + "# * /usr/local/etc/bash-completion.d\n" + "# * ~/bash-completion.d\n" + "#\n" + "# 2. Open a new bash console, and type `%1$s [TAB][TAB]`\n" + "#\n" + "# Documentation\n" + "# -------------\n" + "# The script is called by bash whenever [TAB] or [TAB][TAB] is pressed after\n" + "# '%1$s (..)'. By reading entered command line parameters, it determines possible\n" + "# bash completions and writes them to the COMPREPLY variable. Bash then\n" + "# completes the user input if only one entry is listed in the variable or\n" + "# shows the options if more than one is listed in COMPREPLY.\n" + "#\n" + "# References\n" + "# ----------\n" + "# [1] http://stackoverflow.com/a/12495480/1440785\n" + "# [2] http://tiswww.case.edu/php/chet/bash/FAQ\n" + "# [3] https://www.gnu.org/software/bash/manual/html_node/The-Shopt-Builtin.html\n" + "# [4] https://stackoverflow.com/questions/17042057/bash-check-element-in-array-for-elements-in-another-array/17042655#17042655\n" + "# [5] https://www.gnu.org/software/bash/manual/html_node/Programmable-Completion.html#Programmable-Completion\n" + "#\n" + "\n" + "# Enable programmable completion facilities (see [3])\n" + "shopt -s progcomp\n" + "\n" + "# ArrContains takes two arguments, both of which are the name of arrays.\n" + "# It creates a temporary hash from lArr1 and then checks if all elements of lArr2\n" + "# are in the hashtable.\n" + "#\n" + "# Returns zero (no error) if all elements of the 2nd array are in the 1st 
array,\n" + "# otherwise returns 1 (error).\n" + "#\n" + "# Modified from [4]\n" + "function ArrContains() {\n" + " local lArr1 lArr2\n" + " declare -A tmp\n" + " eval lArr1=(\"\\\"\\${$1[@]}\\\"\")\n" + " eval lArr2=(\"\\\"\\${$2[@]}\\\"\")\n" + " for i in \"${lArr1[@]}\";{ [ -n \"$i\" ] && ((++tmp['$i']));}\n" + " for i in \"${lArr2[@]}\";{ [ -z \"${tmp[$i]}\" ] && return 1;}\n" + " return 0\n" + "}\n" + "\n"; private static final String FOOTER = "" + "\n" + "# Define a completion specification (a compspec) for the\n" + "# `%1$s`, `%1$s.sh`, and `%1$s.bash` commands.\n" + "# Uses the bash `complete` builtin (see [5]) to specify that shell function\n" + "# `_complete_%1$s` is responsible for generating possible completions for the\n" + "# current word on the command line.\n" + "# The `-o default` option means that if the function generated no matches, the\n" + "# default Bash completions and the Readline default filename completions are performed.\n" + "complete -F _complete_%1$s -o default %1$s %1$s.sh %1$s.bash\n"; public static void bash(String scriptName, File out, File command, CommandLine commandLine) throws IOException { String autoCompleteScript = bash(scriptName, commandLine); Writer completionWriter = null; Writer scriptWriter = null; try { completionWriter = new FileWriter(out); completionWriter.write(autoCompleteScript); if (command != null) { scriptWriter = new FileWriter(command); scriptWriter.write("" + "#!/usr/bin/env bash\n" + "\n" + "LIBS=path/to/libs\n" + "CP=\"${LIBS}/myApp.jar\"\n" + "java -cp \"${CP}\" '" + commandLine.getCommand().getClass().getName() + "' $@"); } } finally { if (completionWriter != null) { completionWriter.close(); } if (scriptWriter != null) { scriptWriter.close(); } } } public static String bash(String scriptName, CommandLine commandLine) { if (scriptName == null) { throw new NullPointerException("scriptName"); } if (commandLine == null) { throw new NullPointerException("commandLine"); } String result = ""; result += 
format(HEADER, scriptName); Map<CommandDescriptor, CommandLine> function2command = new LinkedHashMap<CommandDescriptor, CommandLine>(); result += generateEntryPointFunction(scriptName, commandLine, function2command); for (Map.Entry<CommandDescriptor, CommandLine> functionSpec : function2command.entrySet()) { CommandDescriptor descriptor = functionSpec.getKey(); result += generateFunctionForCommand(descriptor.functionName, descriptor.commandName, functionSpec.getValue()); } result += format(FOOTER, scriptName); return result; } private static String generateEntryPointFunction(String scriptName, CommandLine commandLine, Map<CommandDescriptor, CommandLine> function2command) { String HEADER = "" + "# Bash completion entry point function.\n" + "# _complete_%1$s finds which commands and subcommands have been specified\n" + "# on the command line and delegates to the appropriate function\n" + "# to generate possible options and subcommands for the last specified subcommand.\n" + "function _complete_%1$s() {\n" + // " CMDS1=(%1$s gettingstarted)\n" + // " CMDS2=(%1$s tool)\n" + // " CMDS3=(%1$s tool sub1)\n" + // " CMDS4=(%1$s tool sub2)\n" + // "\n" + // " ArrContains COMP_WORDS CMDS4 && { _picocli_basic_tool_sub2; return $?; }\n" + // " ArrContains COMP_WORDS CMDS3 && { _picocli_basic_tool_sub1; return $?; }\n" + // " ArrContains COMP_WORDS CMDS2 && { _picocli_basic_tool; return $?; }\n" + // " ArrContains COMP_WORDS CMDS1 && { _picocli_basic_gettingstarted; return $?; }\n" + // " _picocli_%1$s; return $?;\n" + // "}\n" + // "\n" + // "complete -F _complete_%1$s %1$s\n" + // "\n"; ""; String FOOTER = "\n" + " # No subcommands were specified; generate completions for the top-level command.\n" + " _picocli_%1$s; return $?;\n" + "}\n"; StringBuilder buff = new StringBuilder(1024); buff.append(format(HEADER, scriptName)); List<String> predecessors = new ArrayList<String>(); List<String> functionCallsToArrContains = new ArrayList<String>(); function2command.put(new 
CommandDescriptor("_picocli_" + scriptName, scriptName), commandLine); generateFunctionCallsToArrContains(scriptName, predecessors, commandLine, buff, functionCallsToArrContains, function2command); buff.append("\n"); Collections.reverse(functionCallsToArrContains); for (String func : functionCallsToArrContains) { buff.append(func); } buff.append(format(FOOTER, scriptName)); return buff.toString(); } private static void generateFunctionCallsToArrContains(String scriptName, List<String> predecessors, CommandLine commandLine, StringBuilder buff, List<String> functionCalls, Map<CommandDescriptor, CommandLine> function2command) { // breadth-first: generate command lists and function calls for predecessors + each subcommand for (Map.Entry<String, CommandLine> entry : commandLine.getSubcommands().entrySet()) { int count = functionCalls.size(); String functionName = "_picocli_" + scriptName + "_" + concat("_", predecessors, entry.getKey(), new Bashify()); functionCalls.add(format(" ArrContains COMP_WORDS CMDS%2$d && { %1$s; return $?; }\n", functionName, count)); buff.append( format(" CMDS%2$d=(%1$s)\n", concat(" ", predecessors, entry.getKey(), new Bashify()), count)); // remember the function name and associated subcommand so we can easily generate a function later function2command.put(new CommandDescriptor(functionName, entry.getKey()), entry.getValue()); } // then recursively do the same for all nested subcommands for (Map.Entry<String, CommandLine> entry : commandLine.getSubcommands().entrySet()) { predecessors.add(entry.getKey()); generateFunctionCallsToArrContains(scriptName, predecessors, entry.getValue(), buff, functionCalls, function2command); predecessors.remove(predecessors.size() - 1); } } private static String concat(String infix, String... 
values) { return concat(infix, Arrays.asList(values)); } private static String concat(String infix, List<String> values) { return concat(infix, values, null, new NullFunction()); } private static <V, T extends V> String concat(String infix, List<T> values, T lastValue, Function<V, String> normalize) { StringBuilder sb = new StringBuilder(); for (T val : values) { if (sb.length() > 0) { sb.append(infix); } sb.append(normalize.apply(val)); } if (lastValue == null) { return sb.toString(); } if (sb.length() > 0) { sb.append(infix); } return sb.append(normalize.apply(lastValue)).toString(); } private static String generateFunctionForCommand(String functionName, String commandName, CommandLine commandLine) { String HEADER = "" + "\n" + "# Generates completions for the options and subcommands of the `%s` %scommand.\n" + "function %s() {\n" + " # Get completion data\n" + " CURR_WORD=${COMP_WORDS[COMP_CWORD]}\n" + " PREV_WORD=${COMP_WORDS[COMP_CWORD-1]}\n" + "\n" + " COMMANDS=\"%s\"\n" + // COMMANDS="gettingstarted tool" " FLAG_OPTS=\"%s\"\n" + // FLAG_OPTS="--verbose -V -x --extract -t --list" " ARG_OPTS=\"%s\"\n"; // ARG_OPTS="--host --option --file -f -u --timeUnit" String FOOTER = "" + "\n" + " COMPREPLY=( $(compgen -W \"${FLAG_OPTS} ${ARG_OPTS} ${COMMANDS}\" -- ${CURR_WORD}) )\n" + "}\n"; // Get the fields annotated with @Option and @Parameters for the specified CommandLine. List<Field> optionFields = new ArrayList<Field>(); List<Field> positionalFields = new ArrayList<Field>(); extractOptionsAndParameters(commandLine, optionFields, positionalFields); // Build a list of "flag" options that take no parameters and "arg" options that do take parameters, and subcommands. 
String flagOptionNames = optionNames(filter(optionFields, new BooleanFieldFilter())); List<Field> argOptionFields = filter(optionFields, negate(new BooleanFieldFilter())); String argOptionNames = optionNames(argOptionFields); String commands = concat(" ", new ArrayList<String>(commandLine.getSubcommands().keySet())).trim(); // Generate the header: the function declaration, CURR_WORD, PREV_WORD and COMMANDS, FLAG_OPTS and ARG_OPTS. StringBuilder buff = new StringBuilder(1024); String sub = functionName.equals("_picocli_" + commandName) ? "" : "sub"; buff.append(format(HEADER, commandName, sub, functionName, commands, flagOptionNames, argOptionNames)); // Generate completion lists for options with a known set of valid values. // Starting with java enums. List<Field> enumOptions = filter(optionFields, new EnumFieldFilter()); for (Field f : enumOptions) { buff.append(format(" %s_OPTION_ARGS=\"%s\" # %s values\n", bashify(f.getName()), concat(" ", Arrays.asList((Enum[]) f.getType().getEnumConstants()), null, new EnumNameFunction()).trim(), f.getType().getSimpleName())); } // TODO generate completion lists for other option types: // Charset, Currency, Locale, TimeZone, ByteOrder, // javax.crypto.Cipher, javax.crypto.KeyGenerator, javax.crypto.Mac, javax.crypto.SecretKeyFactory // java.security.AlgorithmParameterGenerator, java.security.AlgorithmParameters, java.security.KeyFactory, java.security.KeyPairGenerator, java.security.KeyStore, java.security.MessageDigest, java.security.Signature // sql.Types? // Now generate the "case" switches for the options whose arguments we can generate completions for buff.append(generateOptionsSwitch(argOptionFields, enumOptions)); // Generate the footer: a default COMPREPLY to fall back to, and the function closing brace. 
buff.append(format(FOOTER)); return buff.toString(); } private static String generateOptionsSwitch(List<Field> argOptionFields, List<Field> enumOptions) { StringBuilder buff = new StringBuilder(1024); buff.append("\n"); buff.append(" case ${CURR_WORD} in\n"); // outer case String outerCases = generateOptionsCases(argOptionFields, enumOptions, "", "\"\""); if (outerCases.length() == 0) { return ""; } buff.append(outerCases); buff.append(" *)\n"); buff.append(" case ${PREV_WORD} in\n"); // inner case buff.append(generateOptionsCases(argOptionFields, enumOptions, " ", "$CURR_WORD")); buff.append(" esac\n"); // end inner case buff.append(" esac\n"); // end outer case return buff.toString(); } private static String generateOptionsCases(List<Field> argOptionFields, List<Field> enumOptions, String indent, String currWord) { StringBuilder buff = new StringBuilder(1024); for (Field f : argOptionFields) { Option option = f.getAnnotation(Option.class); if (enumOptions.contains(f)) { buff.append(format("%s %s)\n", indent, concat("|", option.names()))); // " -u|--timeUnit)\n" buff.append(format("%s COMPREPLY=( $( compgen -W \"${%s_OPTION_ARGS}\" -- %s ) )\n", indent, f.getName(), currWord)); buff.append(format("%s return $?\n", indent)); buff.append(format("%s ;;\n", indent)); } else if (f.getType().equals(File.class) || "java.nio.file.Path".equals(f.getType().getName())) { buff.append(format("%s %s)\n", indent, concat("|", option.names()))); // " -f|--file)\n" buff.append(format("%s compopt -o filenames\n", indent)); buff.append(format("%s COMPREPLY=( $( compgen -f -- %s ) ) # files\n", indent, currWord)); buff.append(format("%s return $?\n", indent)); buff.append(format("%s ;;\n", indent)); } else if (f.getType().equals(InetAddress.class)) { buff.append(format("%s %s)\n", indent, concat("|", option.names()))); // " -h|--host)\n" buff.append(format("%s compopt -o filenames\n", indent)); buff.append(format("%s COMPREPLY=( $( compgen -A hostname -- %s ) )\n", indent, currWord)); 
buff.append(format("%s return $?\n", indent)); buff.append(format("%s ;;\n", indent)); } } return buff.toString(); } private static String optionNames(List<Field> optionFields) { List<String> result = new ArrayList<String>(); for (Field f : optionFields) { Option option = f.getAnnotation(Option.class); result.addAll(Arrays.asList(option.names())); } return concat(" ", result, "", new NullFunction()).trim(); } private static void extractOptionsAndParameters(CommandLine commandLine, List<Field> optionFields, List<Field> positionalParameterFields) { Map<String, Field> optionName2Field = new LinkedHashMap<String, Field>(); Class<?> cls = commandLine.getCommand().getClass(); while (cls != null) { CommandLine.init(cls, new ArrayList<Field>(), optionName2Field, new HashMap<Character, Field>(), positionalParameterFields); cls = cls.getSuperclass(); } for (Field f : optionName2Field.values()) { if (!optionFields.contains(f)) { optionFields.add(f); } } } }
src/main/java/picocli/AutoComplete.java
/* Copyright 2017 Remko Popma Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package picocli; import java.io.File; import java.lang.reflect.Field; import java.net.InetAddress; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import static java.lang.String.*; /** * Generates a bash auto-complete script. */ public class AutoComplete { private AutoComplete() { } private static interface Function<T, V> { V apply(T t); } /** * Drops all characters that are not valid for bash function and identifier names. 
*/ private static class Bashify implements Function<CharSequence, String> { public String apply(CharSequence value) { return bashify(value); } } private static String bashify(CharSequence value) { StringBuilder builder = new StringBuilder(); for (int i = 0; i < value.length(); i++) { char c = value.charAt(i); if (Character.isLetterOrDigit(c) || c == '_') { builder.append(c); } else if (Character.isSpaceChar(c)) { builder.append('_'); } } return builder.toString(); } private static class EnumNameFunction implements Function<Enum<?>, String> { @Override public String apply(final Enum<?> anEnum) { return anEnum.name(); } } private static class NullFunction implements Function<CharSequence, String> { @Override public String apply(CharSequence value) { return value.toString(); } } private static interface Predicate<T> { boolean test(T t); } private static class BooleanFieldFilter implements Predicate<Field> { @Override public boolean test(Field f) { return f.getType() == Boolean.TYPE || f.getType() == Boolean.class; } } private static class EnumFieldFilter implements Predicate<Field> { @Override public boolean test(Field f) { return f.getType().isEnum(); } } private static <T> Predicate<T> negate(final Predicate<T> original) { return new Predicate<T>() { @Override public boolean test(T t) { return !original.test(t); } }; } private static <T> List<T> filter(List<T> list, Predicate<T> filter) { List<T> result = new ArrayList<T>(); for (T t : list) { if (filter.test(t)) { result.add(t); } } return result; } private static class CommandDescriptor { final String functionName; final String commandName; CommandDescriptor(String functionName, String commandName) { this.functionName = functionName; this.commandName = commandName; } public int hashCode() { return functionName.hashCode() * 37 + commandName.hashCode(); } public boolean equals(Object obj) { if (!(obj instanceof CommandDescriptor)) { return false; } if (obj == this) { return true; } CommandDescriptor other = 
(CommandDescriptor) obj; return other.functionName.equals(functionName) && other.commandName.equals(commandName); } } private static final String HEADER = "" + "#!/usr/bin/env bash\n" + "#\n" + "# %1$s Bash Completion\n" + "# =======================\n" + "#\n" + "# Bash completion support for the `%1$s` command,\n" + "# generated by [picocli](http://picocli.info/).\n" + "#\n" + "# Installation\n" + "# ------------\n" + "#\n" + "# 1. Place this file in a `bash-completion.d` folder:\n" + "#\n" + "# * /etc/bash-completion.d\n" + "# * /usr/local/etc/bash-completion.d\n" + "# * ~/bash-completion.d\n" + "#\n" + "# 2. Open a new bash console, and type `%1$s [TAB][TAB]`\n" + "#\n" + "# Documentation\n" + "# -------------\n" + "# The script is called by bash whenever [TAB] or [TAB][TAB] is pressed after\n" + "# '%1$s (..)'. By reading entered command line parameters, it determines possible\n" + "# bash completions and writes them to the COMPREPLY variable. Bash then\n" + "# completes the user input if only one entry is listed in the variable or\n" + "# shows the options if more than one is listed in COMPREPLY.\n" + "#\n" + "# References\n" + "# ----------\n" + "# [1] http://stackoverflow.com/a/12495480/1440785\n" + "# [2] http://tiswww.case.edu/php/chet/bash/FAQ\n" + "# [3] https://www.gnu.org/software/bash/manual/html_node/The-Shopt-Builtin.html\n" + "# [4] https://stackoverflow.com/questions/17042057/bash-check-element-in-array-for-elements-in-another-array/17042655#17042655\n" + "# [5] https://www.gnu.org/software/bash/manual/html_node/Programmable-Completion.html#Programmable-Completion\n" + "#\n" + "\n" + "# Enable programmable completion facilities (see [3])\n" + "shopt -s progcomp\n" + "\n" + "# ArrContains takes two arguments, both of which are the name of arrays.\n" + "# It creates a temporary hash from lArr1 and then checks if all elements of lArr2\n" + "# are in the hashtable.\n" + "#\n" + "# Returns zero (no error) if all elements of the 2nd array are in the 1st 
array,\n" + "# otherwise returns 1 (error).\n" + "#\n" + "# Modified from [4]\n" + "function ArrContains() {\n" + " local lArr1 lArr2\n" + " declare -A tmp\n" + " eval lArr1=(\"\\\"\\${$1[@]}\\\"\")\n" + " eval lArr2=(\"\\\"\\${$2[@]}\\\"\")\n" + " for i in \"${lArr1[@]}\";{ [ -n \"$i\" ] && ((++tmp['$i']));}\n" + " for i in \"${lArr2[@]}\";{ [ -z \"${tmp[$i]}\" ] && return 1;}\n" + " return 0\n" + "}\n" + "\n"; private static final String FOOTER = "" + "\n" + "# Define a completion specification (a compspec) for the\n" + "# `%1$s`, `%1$s.sh`, and `%1$s.bash` commands.\n" + "# Uses the bash `complete` builtin (see [5]) to specify that shell function\n" + "# `_complete_%1$s` is responsible for generating possible completions for the\n" + "# current word on the command line.\n" + "# The `-o default` option means that if the function generated no matches, the\n" + "# default Bash completions and the Readline default filename completions are performed.\n" + "complete -F _complete_%1$s -o default %1$s %1$s.sh %1$s.bash\n"; public static String bash(String scriptName, CommandLine commandLine) { if (scriptName == null) { throw new NullPointerException("scriptName"); } if (commandLine == null) { throw new NullPointerException("commandLine"); } String result = ""; result += format(HEADER, scriptName); Map<CommandDescriptor, CommandLine> function2command = new LinkedHashMap<CommandDescriptor, CommandLine>(); result += generateEntryPointFunction(scriptName, commandLine, function2command); for (Map.Entry<CommandDescriptor, CommandLine> functionSpec : function2command.entrySet()) { CommandDescriptor descriptor = functionSpec.getKey(); result += generateFunctionForCommand(descriptor.functionName, descriptor.commandName, functionSpec.getValue()); } result += format(FOOTER, scriptName); return result; } private static String generateEntryPointFunction(String scriptName, CommandLine commandLine, Map<CommandDescriptor, CommandLine> function2command) { String HEADER = "" + "# Bash 
completion entry point function.\n" + "# _complete_%1$s finds which commands and subcommands have been specified\n" + "# on the command line and delegates to the appropriate function\n" + "# to generate possible options and subcommands for the last specified subcommand.\n" + "function _complete_%1$s() {\n" + // " CMDS1=(%1$s gettingstarted)\n" + // " CMDS2=(%1$s tool)\n" + // " CMDS3=(%1$s tool sub1)\n" + // " CMDS4=(%1$s tool sub2)\n" + // "\n" + // " ArrContains COMP_WORDS CMDS4 && { _picocli_basic_tool_sub2; return $?; }\n" + // " ArrContains COMP_WORDS CMDS3 && { _picocli_basic_tool_sub1; return $?; }\n" + // " ArrContains COMP_WORDS CMDS2 && { _picocli_basic_tool; return $?; }\n" + // " ArrContains COMP_WORDS CMDS1 && { _picocli_basic_gettingstarted; return $?; }\n" + // " _picocli_%1$s; return $?;\n" + // "}\n" + // "\n" + // "complete -F _complete_%1$s %1$s\n" + // "\n"; ""; String FOOTER = "\n" + " # No subcommands were specified; generate completions for the top-level command.\n" + " _picocli_%1$s; return $?;\n" + "}\n"; StringBuilder buff = new StringBuilder(1024); buff.append(format(HEADER, scriptName)); List<String> predecessors = new ArrayList<String>(); List<String> functionCallsToArrContains = new ArrayList<String>(); function2command.put(new CommandDescriptor("_picocli_" + scriptName, scriptName), commandLine); generateFunctionCallsToArrContains(scriptName, predecessors, commandLine, buff, functionCallsToArrContains, function2command); buff.append("\n"); Collections.reverse(functionCallsToArrContains); for (String func : functionCallsToArrContains) { buff.append(func); } buff.append(format(FOOTER, scriptName)); return buff.toString(); } private static void generateFunctionCallsToArrContains(String scriptName, List<String> predecessors, CommandLine commandLine, StringBuilder buff, List<String> functionCalls, Map<CommandDescriptor, CommandLine> function2command) { // breadth-first: generate command lists and function calls for predecessors + each 
subcommand for (Map.Entry<String, CommandLine> entry : commandLine.getSubcommands().entrySet()) { int count = functionCalls.size(); String functionName = "_picocli_" + scriptName + "_" + concat("_", predecessors, entry.getKey(), new Bashify()); functionCalls.add(format(" ArrContains COMP_WORDS CMDS%2$d && { %1$s; return $?; }\n", functionName, count)); buff.append( format(" CMDS%2$d=(%1$s)\n", concat(" ", predecessors, entry.getKey(), new Bashify()), count)); // remember the function name and associated subcommand so we can easily generate a function later function2command.put(new CommandDescriptor(functionName, entry.getKey()), entry.getValue()); } // then recursively do the same for all nested subcommands for (Map.Entry<String, CommandLine> entry : commandLine.getSubcommands().entrySet()) { predecessors.add(entry.getKey()); generateFunctionCallsToArrContains(scriptName, predecessors, entry.getValue(), buff, functionCalls, function2command); predecessors.remove(predecessors.size() - 1); } } private static String concat(String infix, String... 
values) { return concat(infix, Arrays.asList(values)); } private static String concat(String infix, List<String> values) { return concat(infix, values, null, new NullFunction()); } private static <V, T extends V> String concat(String infix, List<T> values, T lastValue, Function<V, String> normalize) { StringBuilder sb = new StringBuilder(); for (T val : values) { if (sb.length() > 0) { sb.append(infix); } sb.append(normalize.apply(val)); } if (lastValue == null) { return sb.toString(); } if (sb.length() > 0) { sb.append(infix); } return sb.append(normalize.apply(lastValue)).toString(); } private static String generateFunctionForCommand(String functionName, String commandName, CommandLine commandLine) { String HEADER = "" + "\n" + "# Generates completions for the options and subcommands of the `%s` %scommand.\n" + "function %s() {\n" + " # Get completion data\n" + " CURR_WORD=${COMP_WORDS[COMP_CWORD]}\n" + " PREV_WORD=${COMP_WORDS[COMP_CWORD-1]}\n" + "\n" + " COMMANDS=\"%s\"\n" + // COMMANDS="gettingstarted tool" " FLAG_OPTS=\"%s\"\n" + // FLAG_OPTS="--verbose -V -x --extract -t --list" " ARG_OPTS=\"%s\"\n"; // ARG_OPTS="--host --option --file -f -u --timeUnit" String FOOTER = "" + "\n" + " COMPREPLY=( $(compgen -W \"${FLAG_OPTS} ${ARG_OPTS} ${COMMANDS}\" -- ${CURR_WORD}) )\n" + "}\n"; // Get the fields annotated with @Option and @Parameters for the specified CommandLine. List<Field> optionFields = new ArrayList<Field>(); List<Field> positionalFields = new ArrayList<Field>(); extractOptionsAndParameters(commandLine, optionFields, positionalFields); // Build a list of "flag" options that take no parameters and "arg" options that do take parameters, and subcommands. 
String flagOptionNames = optionNames(filter(optionFields, new BooleanFieldFilter())); List<Field> argOptionFields = filter(optionFields, negate(new BooleanFieldFilter())); String argOptionNames = optionNames(argOptionFields); String commands = concat(" ", new ArrayList<String>(commandLine.getSubcommands().keySet())).trim(); // Generate the header: the function declaration, CURR_WORD, PREV_WORD and COMMANDS, FLAG_OPTS and ARG_OPTS. StringBuilder buff = new StringBuilder(1024); String sub = functionName.equals("_picocli_" + commandName) ? "" : "sub"; buff.append(format(HEADER, commandName, sub, functionName, commands, flagOptionNames, argOptionNames)); // Generate completion lists for options with a known set of valid values. // Starting with java enums. List<Field> enumOptions = filter(optionFields, new EnumFieldFilter()); for (Field f : enumOptions) { buff.append(format(" %s_OPTION_ARGS=\"%s\" # %s values\n", bashify(f.getName()), concat(" ", Arrays.asList((Enum[]) f.getType().getEnumConstants()), null, new EnumNameFunction()).trim(), f.getType().getSimpleName())); } // TODO generate completion lists for other option types: // Charset, Currency, Locale, TimeZone, ByteOrder, // javax.crypto.Cipher, javax.crypto.KeyGenerator, javax.crypto.Mac, javax.crypto.SecretKeyFactory // java.security.AlgorithmParameterGenerator, java.security.AlgorithmParameters, java.security.KeyFactory, java.security.KeyPairGenerator, java.security.KeyStore, java.security.MessageDigest, java.security.Signature // sql.Types? // Now generate the "case" switches for the options whose arguments we can generate completions for buff.append(generateOptionsSwitch(argOptionFields, enumOptions)); // Generate the footer: a default COMPREPLY to fall back to, and the function closing brace. 
buff.append(format(FOOTER)); return buff.toString(); } private static String generateOptionsSwitch(List<Field> argOptionFields, List<Field> enumOptions) { StringBuilder buff = new StringBuilder(1024); buff.append("\n"); buff.append(" case ${CURR_WORD} in\n"); // outer case String outerCases = generateOptionsCases(argOptionFields, enumOptions, "", "\"\""); if (outerCases.length() == 0) { return ""; } buff.append(outerCases); buff.append(" *)\n"); buff.append(" case ${PREV_WORD} in\n"); // inner case buff.append(generateOptionsCases(argOptionFields, enumOptions, " ", "$CURR_WORD")); buff.append(" esac\n"); // end inner case buff.append(" esac\n"); // end outer case return buff.toString(); } private static String generateOptionsCases(List<Field> argOptionFields, List<Field> enumOptions, String indent, String currWord) { StringBuilder buff = new StringBuilder(1024); for (Field f : argOptionFields) { CommandLine.Option option = f.getAnnotation(CommandLine.Option.class); if (enumOptions.contains(f)) { buff.append(format("%s %s)\n", indent, concat("|", option.names()))); // " -u|--timeUnit)\n" buff.append(format("%s COMPREPLY=( $( compgen -W \"${%s_OPTION_ARGS}\" -- %s ) )\n", indent, f.getName(), currWord)); buff.append(format("%s return $?\n", indent)); buff.append(format("%s ;;\n", indent)); } else if (f.getType().equals(File.class) || "java.nio.file.Path".equals(f.getType().getName())) { buff.append(format("%s %s)\n", indent, concat("|", option.names()))); // " -f|--file)\n" buff.append(format("%s compopt -o filenames\n", indent)); buff.append(format("%s COMPREPLY=( $( compgen -f -- %s ) ) # files\n", indent, currWord)); buff.append(format("%s return $?\n", indent)); buff.append(format("%s ;;\n", indent)); } else if (f.getType().equals(InetAddress.class)) { buff.append(format("%s %s)\n", indent, concat("|", option.names()))); // " -h|--host)\n" buff.append(format("%s compopt -o filenames\n", indent)); buff.append(format("%s COMPREPLY=( $( compgen -A hostname -- %s ) 
)\n", indent, currWord)); buff.append(format("%s return $?\n", indent)); buff.append(format("%s ;;\n", indent)); } } return buff.toString(); } private static String optionNames(List<Field> optionFields) { List<String> result = new ArrayList<String>(); for (Field f : optionFields) { CommandLine.Option option = f.getAnnotation(CommandLine.Option.class); result.addAll(Arrays.asList(option.names())); } return concat(" ", result, "", new NullFunction()).trim(); } private static void extractOptionsAndParameters(CommandLine commandLine, List<Field> optionFields, List<Field> positionalParameterFields) { Map<String, Field> optionName2Field = new LinkedHashMap<String, Field>(); Class<?> cls = commandLine.getCommand().getClass(); while (cls != null) { CommandLine.init(cls, new ArrayList<Field>(), optionName2Field, new HashMap<Character, Field>(), positionalParameterFields); cls = cls.getSuperclass(); } for (Field f : optionName2Field.values()) { if (!optionFields.contains(f)) { optionFields.add(f); } } } }
#121 add main method to run picocli.AutoComplete as an application
src/main/java/picocli/AutoComplete.java
#121 add main method to run picocli.AutoComplete as an application
<ide><path>rc/main/java/picocli/AutoComplete.java <ide> package picocli; <ide> <ide> import java.io.File; <add>import java.io.FileWriter; <add>import java.io.IOException; <add>import java.io.Writer; <ide> import java.lang.reflect.Field; <ide> import java.net.InetAddress; <ide> import java.util.ArrayList; <ide> import java.util.List; <ide> import java.util.Map; <ide> <add>import picocli.CommandLine.Command; <add>import picocli.CommandLine.Option; <add>import picocli.CommandLine.Parameters; <add> <ide> import static java.lang.String.*; <ide> <ide> /** <ide> */ <ide> public class AutoComplete { <ide> private AutoComplete() { <add> } <add> <add> public static void main(String[] args) { CommandLine.run(new App(), System.err, args); } <add> <add> @Command(name = "picocli.AutoComplete", description = "") <add> private static class App implements Runnable { <add> <add> @Option(names = {"-n", "--name"}, description = "Name of the command to create a completion script for. " + <add> "When omitted, the annotated class @Command 'name' attribute is used. " + <add> "If no @Command 'name' attribute exists, '<CLASS-SIMPLE-NAME>' (in lower-case) is used.") <add> String commandName; <add> <add> @Option(names = {"-o", "--completionScript"}, <add> description = "Name of the completion script file to generate. 
" + <add> "When omitted, a file named '<commandName>_completion' " + <add> "is generated in the current directory.") <add> File autoCompleteScript; <add> <add> @Option(names = {"-w", "--writeCommandScript"}, <add> description = "Write a '<commandName>' sample command script to the same directory " + <add> "as the completion script.") <add> boolean writeCommandScript; <add> <add> @Parameters(arity = "1", description = "Fully qualified class name of the annotated " + <add> "@Command class to generate a completion script for.") <add> String commandLineFQCN; <add> <add> @Option(names = {"-f", "--force"}, description = "Overwrite existing script files.") <add> boolean overwriteIfExists; <add> <add> @Override <add> public void run() { <add> try { <add> Class<?> cls = Class.forName(commandLineFQCN); <add> CommandLine commandLine = new CommandLine(cls.newInstance()); <add> <add> if (commandName == null) { <add> commandName = new CommandLine.Help(commandLine.getCommand()).commandName; <add> if (CommandLine.Help.DEFAULT_COMMAND_NAME.equals(commandName)) { <add> commandName = cls.getSimpleName().toLowerCase(); <add> } <add> } <add> if (autoCompleteScript == null) { <add> autoCompleteScript = new File(commandName + "_completion"); <add> } <add> File commandScript = null; <add> if (writeCommandScript) { <add> commandScript = new File(autoCompleteScript.getAbsoluteFile().getParentFile(), commandName); <add> } <add> if (commandScript != null && !overwriteIfExists && checkExists(commandScript)) { return; } <add> if (!overwriteIfExists && checkExists(autoCompleteScript)) { return; } <add> AutoComplete.bash(commandName, autoCompleteScript, commandScript, commandLine); <add> } catch (Exception ex) { <add> ex.printStackTrace(); <add> } <add> } <add> <add> private boolean checkExists(final File file) { <add> if (file.exists()) { <add> System.err.println(file.getAbsolutePath() + " exists. 
Specify -f to overwrite."); <add> CommandLine.usage(this, System.err); <add> return true; <add> } <add> return false; <add> } <ide> } <ide> <ide> private static interface Function<T, V> { <ide> "# default Bash completions and the Readline default filename completions are performed.\n" + <ide> "complete -F _complete_%1$s -o default %1$s %1$s.sh %1$s.bash\n"; <ide> <add> public static void bash(String scriptName, File out, File command, CommandLine commandLine) throws IOException { <add> String autoCompleteScript = bash(scriptName, commandLine); <add> Writer completionWriter = null; <add> Writer scriptWriter = null; <add> try { <add> completionWriter = new FileWriter(out); <add> completionWriter.write(autoCompleteScript); <add> <add> if (command != null) { <add> scriptWriter = new FileWriter(command); <add> scriptWriter.write("" + <add> "#!/usr/bin/env bash\n" + <add> "\n" + <add> "LIBS=path/to/libs\n" + <add> "CP=\"${LIBS}/myApp.jar\"\n" + <add> "java -cp \"${CP}\" '" + commandLine.getCommand().getClass().getName() + "' $@"); <add> } <add> } finally { <add> if (completionWriter != null) { completionWriter.close(); } <add> if (scriptWriter != null) { scriptWriter.close(); } <add> } <add> } <ide> public static String bash(String scriptName, CommandLine commandLine) { <ide> if (scriptName == null) { throw new NullPointerException("scriptName"); } <ide> if (commandLine == null) { throw new NullPointerException("commandLine"); } <ide> private static String generateOptionsCases(List<Field> argOptionFields, List<Field> enumOptions, String indent, String currWord) { <ide> StringBuilder buff = new StringBuilder(1024); <ide> for (Field f : argOptionFields) { <del> CommandLine.Option option = f.getAnnotation(CommandLine.Option.class); <add> Option option = f.getAnnotation(Option.class); <ide> if (enumOptions.contains(f)) { <ide> buff.append(format("%s %s)\n", indent, concat("|", option.names()))); // " -u|--timeUnit)\n" <ide> buff.append(format("%s COMPREPLY=( $( compgen -W 
\"${%s_OPTION_ARGS}\" -- %s ) )\n", indent, f.getName(), currWord)); <ide> private static String optionNames(List<Field> optionFields) { <ide> List<String> result = new ArrayList<String>(); <ide> for (Field f : optionFields) { <del> CommandLine.Option option = f.getAnnotation(CommandLine.Option.class); <add> Option option = f.getAnnotation(Option.class); <ide> result.addAll(Arrays.asList(option.names())); <ide> } <ide> return concat(" ", result, "", new NullFunction()).trim();
Java
mit
4a7905b4117cc1011c4a3c40bf4d953811c14638
0
venkatramanm/swf-all,venkatramanm/swf-all,venkatramanm/swf-all
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package com.venky.swf.db.table; import com.venky.cache.Cache; import com.venky.core.collections.IgnoreCaseList; import com.venky.core.collections.LowerCaseStringCache; import com.venky.core.collections.SequenceMap; import com.venky.core.collections.SequenceSet; import com.venky.core.log.SWFLogger; import com.venky.core.log.TimerStatistics.Timer; import com.venky.core.string.StringUtil; import com.venky.core.util.MultiException; import com.venky.core.util.ObjectUtil; import com.venky.extension.Registry; import com.venky.swf.db.Database; import com.venky.swf.db.JdbcTypeHelper.TypeConverter; import com.venky.swf.db.JdbcTypeHelper.TypeRef; import com.venky.swf.db.annotations.column.COLUMN_DEF; import com.venky.swf.db.annotations.column.IS_VIRTUAL; import com.venky.swf.db.annotations.column.defaulting.StandardDefaulter; import com.venky.swf.db.annotations.column.pm.PARTICIPANT; import com.venky.swf.db.annotations.column.relationship.CONNECTED_VIA; import com.venky.swf.db.annotations.column.validations.processors.*; import com.venky.swf.db.annotations.model.CONFIGURATION; import com.venky.swf.db.annotations.model.validations.ModelValidator; import com.venky.swf.db.annotations.model.validations.UniqueKeyValidator; import com.venky.swf.db.model.Model; import com.venky.swf.db.model.User; import com.venky.swf.db.model.reflection.ModelReflector; import com.venky.swf.db.table.Table.ColumnDescriptor; import com.venky.swf.exceptions.AccessDeniedException; import com.venky.swf.routing.Config; import com.venky.swf.sql.*; import com.venky.swf.sql.parser.SQLExpressionParser; import java.lang.annotation.Annotation; import java.lang.reflect.*; import java.util.*; /** * * @author venky */ public class ModelInvocationHandler implements InvocationHandler { private Record record = null; private Class<? 
extends Model> modelClass = null; private List<String> virtualFields = new IgnoreCaseList(false); private String modelName = null; private transient Model proxy = null; private transient ModelReflector<? extends Model> reflector = null; @SuppressWarnings("unchecked") public <M extends Model> ModelReflector<M> getReflector() { if (reflector == null) { reflector = ModelReflector.instance(modelClass); } return (ModelReflector<M>) reflector; } public String getModelName(){ return modelName; } public String getPool(){ return getReflector().getPool(); } public Class<? extends Model> getModelClass(){ return modelClass; } /** * Used for serialization.: */ protected ModelInvocationHandler() { } public ModelInvocationHandler(Class<? extends Model> modelClass, Record record) { this.record = record; this.modelClass = modelClass; this.reflector = ModelReflector.instance(modelClass); this.modelName = Table.getSimpleModelClassName(reflector.getTableName()); this.virtualFields = reflector.getVirtualFields(); record.startTracking(false); } public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { bootStrapProxy(getModelClass().cast(proxy)); String mName = method.getName(); Class<?> retType = method.getReturnType(); Class<?>[] parameters = method.getParameterTypes(); if (getReflector().getFieldGetterSignatures().contains(getReflector().getSignature(method))) { String fieldName = getReflector().getFieldName(method); if (!virtualFields.contains(fieldName)){ ColumnDescriptor cd = getReflector().getColumnDescriptor(fieldName); String columnName = cd.getName(); Object value = record.get(columnName); TypeRef<?> ref =Database.getJdbcTypeHelper(getPool()).getTypeRef(retType); TypeConverter<?> converter = ref.getTypeConverter(); if (value == null) { COLUMN_DEF colDef = getReflector().getAnnotation(method, COLUMN_DEF.class); if (colDef != null) { value = StandardDefaulter.getDefaultValue(colDef.value(), colDef.args()); } } if (value == null){ if 
(retType.isPrimitive()){ return converter.valueOf(value); }else { return value; } } else if (retType.isInstance(value) && !ref.isLOB()) { return value; } else { return converter.valueOf(value); } } } else if (getReflector().getFieldSetters().contains(method) ) { String fieldName = StringUtil.underscorize(mName.substring(3)); if (!virtualFields.contains(fieldName)){ String columnName = getReflector().getColumnDescriptor(fieldName).getName(); return record.put(columnName, args[0]); } } else if (getReflector().getReferredModelGetters().contains(method)) { if (!getReflector().isAnnotationPresent(method,IS_VIRTUAL.class)){ return getParent(method); } } else if (getReflector().getChildGetters().contains(method)) { if (!getReflector().isAnnotationPresent(method,IS_VIRTUAL.class)){ CONNECTED_VIA join = getReflector().getAnnotation(method,CONNECTED_VIA.class); if (join != null){ return getChildren(getReflector().getChildModelClass(method),join.value(),join.additional_join()); }else { return getChildren(getReflector().getChildModelClass(method)); } } } /* Optimization for (Object impl: modelImplObjects){ try { Method inModelImplClass = impl.getClass().getMethod(mName, parameters); if (retType.isAssignableFrom(inModelImplClass.getReturnType())){ Timer timer = startTimer(inModelImplClass.toString()); try { return inModelImplClass.invoke(impl, args); }catch(InvocationTargetException ex){ throw ex.getCause(); }finally{ timer.stop(); } } }catch(NoSuchMethodException ex){ // } } Method inCurrentClass = this.getClass().getMethod(mName, parameters); if (retType.isAssignableFrom(inCurrentClass.getReturnType())) { try { return inCurrentClass.invoke(this, args); }catch (InvocationTargetException ex){ throw ex.getCause(); } } else { throw new NoSuchMethodException("Donot know how to execute this method"); } */ Class<?> implClass = getMethodImplClass(method); Object implObject = null; if (implClass != null){ implObject = modelImplObjects.get(implClass); } if (implClass == null || 
implObject == null){ //implObject is null while constructing impls. implClass = this.getClass(); implObject = this; } Method inImplClass = implClass.getMethod(mName, parameters); if (retType.isAssignableFrom(inImplClass.getReturnType())) { Timer timer = cat.startTimer(inImplClass.toString()); try { return inImplClass.invoke(implObject, args); }catch (InvocationTargetException ex){ throw ex.getCause(); }finally{ timer.stop(); } }else { throw new NoSuchMethodException("Donot know how to execute " + getReflector().getSignature(method)); } } private transient final SWFLogger cat = Config.instance().getLogger(getClass().getName()+"."+getModelName()); @SuppressWarnings("unchecked") public <P extends Model> P getParent(Method parentGetter) { Class<P> parentClass = (Class<P>) parentGetter.getReturnType(); String parentIdFieldName = StringUtil.underscorize(parentGetter.getName().substring(3) +"Id"); Method parentIdGetter = this.getReflector().getFieldGetter(parentIdFieldName); Number parentId; try { parentId = (Number)parentIdGetter.invoke(proxy); } catch (Exception e) { throw new RuntimeException(parentIdFieldName,e); } P parent = null; if (parentId != null) { parent = Database.getTable(parentClass).get(parentId.longValue()); } return parent; } public <C extends Model> List<C> getChildren(Class<C> childClass){ Class<? 
extends Model> modelClass = getReflector().getModelClass(); ModelReflector<?> childReflector = ModelReflector.instance(childClass); Expression expression = new Expression(childReflector.getPool(),Conjunction.OR); for (String fieldName: childReflector.getFields()){ if (fieldName.endsWith("_ID")){ Method fieldGetter = childReflector.getFieldGetter(fieldName); Method referredModelGetter = childReflector.getReferredModelGetterFor(fieldGetter); if (referredModelGetter != null && ObjectUtil.equals(referredModelGetter.getReturnType().getSimpleName(),modelClass.getSimpleName())){ String columnName = childReflector.getColumnDescriptor(fieldName).getName(); expression.add(new Expression(childReflector.getPool(),columnName,Operator.EQ,proxy.getId())); } } } if (expression.isEmpty()){ throw new RuntimeException("Don;t know how to getChildren of kind " + childClass.getSimpleName() + " for " + modelClass.getSimpleName()); } return getChildren(childClass,expression); } public <C extends Model> List<C> getChildren(Class<C> childClass, String parentIdFieldName){ return getChildren(childClass,parentIdFieldName,null); } public <C extends Model> List<C> getChildren(Class<C> childClass, String parentIdFieldName, String addnl_condition){ long parentId = proxy.getId(); ModelReflector<C> childReflector = ModelReflector.instance(childClass); String parentIdColumnName = childReflector.getColumnDescriptor(parentIdFieldName).getName(); Expression where = new Expression(getPool(),Conjunction.AND); where.add(new Expression(getPool(),parentIdColumnName,Operator.EQ,new BindVariable(getPool(),parentId))); if (!ObjectUtil.isVoid(addnl_condition)){ Expression addnl = new SQLExpressionParser(childClass).parse(addnl_condition); where.add(addnl); } return getChildren(childClass, where); } public <C extends Model> List<C> getChildren(Class<C> childClass, Expression expression){ Select q = new Select(); q.from(childClass); q.where(expression); q.orderBy(ModelReflector.instance(childClass).getOrderBy()); 
return q.execute(childClass); } public <M extends Model> void setProxy(M proxy) { this.proxy = proxy; } @SuppressWarnings("unchecked") public <M extends Model> M getProxy() { return (M)proxy; } public boolean isAccessibleBy(User user){ return isAccessibleBy(user, getReflector().getModelClass()); } public Set<String> getParticipatingRoles(User user){ return getParticipatingRoles(user,getReflector().getModelClass()); } public Set<String> getParticipatingRoles(User user,Class<? extends Model> asModel){ if (!getReflector().reflects(asModel)){ throw new AccessDeniedException(); } return getParticipatingRoles(user, user.getParticipationOptions(asModel,getProxy())); } private Set<String> getParticipatingRoles(User user,Cache<String,Map<String,List<Long>>> pGroupOptions){ Timer timer = cat.startTimer(); try { ModelReflector<? extends Model> reflector = getReflector(); Set<String> participantingRoles = new HashSet<String>(); for (String participantRoleGroup : pGroupOptions.keySet()){ Map<String,List<Long>> pOptions = pGroupOptions.get(participantRoleGroup); for (String referencedModelIdFieldName :pOptions.keySet()){ Number referenceValue = reflector.get(getRawRecord(),referencedModelIdFieldName); PARTICIPANT participant = reflector.getAnnotation(reflector.getFieldGetter(referencedModelIdFieldName), PARTICIPANT.class); if (participant.redundant() || pOptions.get(referencedModelIdFieldName).contains(referenceValue)){ participantingRoles.add(reflector.getParticipatingRole(referencedModelIdFieldName)); } } if (!pOptions.isEmpty() && participantingRoles.isEmpty()){ throw new AccessDeniedException(); // User is not a participant on the model. } } return participantingRoles; }finally{ timer.stop(); } } public boolean isAccessibleBy(User user,Class<? 
extends Model> asModel){ Timer timer = cat.startTimer(null,Config.instance().isTimerAdditive()); try { if (!getReflector().reflects(asModel)){ return false; } Set<String> pRoles = getParticipatingRoles(user,asModel); return (pRoles != null);// It is always true. returning false depends on AccessDeniedException being thrown. }catch(AccessDeniedException ex){ return false; }finally{ timer.stop(); } } public Record getRawRecord(){ return record; } public static void dispose(){ modelImplClassesCache.clear(); methodImplClassCache.clear(); } public static <M extends Model> M getProxy(Class<M> modelClass, Record record) { ModelReflector<M> ref = ModelReflector.instance(modelClass); try { ModelInvocationHandler mImpl = new ModelInvocationHandler(modelClass, record); M m = modelClass.cast(Proxy.newProxyInstance(modelClass.getClassLoader(), ref.getClassHierarchies().toArray(new Class<?>[]{}), mImpl)); mImpl.bootStrapProxy(m); return m; } catch (Exception e) { throw new RuntimeException(e); } } private <M extends Model> void bootStrapProxy(M m) { if (proxy == null) { setProxy(m); List<Class<?>> modelImplClasses = getModelImplClasses(modelClass); for (Class<?> implClass: modelImplClasses){ addModelImplObject(constructImpl(implClass, m)); } } } @SuppressWarnings("unchecked") private static <M extends Model> Object constructImpl(Class<?> implClass, M m){ if (ModelImpl.class.isAssignableFrom(implClass)){ if (ModelImpl.class.equals(implClass)) { return new ModelImpl<M>(m); }else { ParameterizedType pt = (ParameterizedType)implClass.getGenericSuperclass(); Class<? extends Model> modelClass = (Class<? 
extends Model>) pt.getActualTypeArguments()[0]; try { return implClass.getConstructor(modelClass).newInstance(m); } catch (Exception e) { throw new RuntimeException(e); } } } throw new RuntimeException("Don't know how to instantiate " + implClass.getName()); } private transient SequenceMap<Class<?>,Object> modelImplObjects = new SequenceMap<Class<?>,Object>(); private void addModelImplObject(Object o){ modelImplObjects.put(o.getClass(),o); } private Class<?> getMethodImplClass(Method m){ return methodImplClassCache.get(getReflector().getModelClass()).get(m); } private static Cache<Class<? extends Model>,Cache<Method,Class<?>>> methodImplClassCache = new Cache<Class<? extends Model>, Cache<Method,Class<?>>>() { /** * */ private static final long serialVersionUID = -8303755398345923039L; @Override protected Cache<Method, Class<?>> getValue(final Class<? extends Model> modelClass) { return new Cache<Method, Class<?>>() { /** * */ private static final long serialVersionUID = 1322249489351360016L; @Override protected Class<?> getValue(Method method) { String mName = method.getName(); Class<?> retType = method.getReturnType(); Class<?>[] parameters = method.getParameterTypes(); for (Class<?> implClass: getModelImplClasses(modelClass)){ try { Method inModelImplClass = implClass.getMethod(mName, parameters); if (retType.isAssignableFrom(inModelImplClass.getReturnType())){ return implClass; } }catch (NoSuchMethodException ex){ // } } return null; } }; } }; private static Cache<Class<? extends Model>,List<Class<?>>> modelImplClassesCache = new Cache<Class<? extends Model>, List<Class<?>>>() { /** * */ private static final long serialVersionUID = 7544606584634901930L; @Override protected List<Class<?>> getValue(Class<? extends Model> modelClass) { SequenceSet<Class<? 
extends Model>> modelClasses = ModelReflector.instance(modelClass).getClassHierarchies(); List<Class<?>> modelImplClasses = new ArrayList<Class<?>>(); for (Class<?> c : modelClasses){ String modelImplClassName = c.getName()+"Impl"; try { Class<?> modelImplClass = Class.forName(modelImplClassName); if (ModelImpl.class.isAssignableFrom(modelImplClass)){ modelImplClasses.add(modelImplClass); }else { throw new ClassCastException(modelImplClassName + " does not extend " + ModelImpl.class.getName()); } }catch(ClassNotFoundException ex){ // Nothing } } return modelImplClasses; } }; private static <M extends Model> List<Class<?>> getModelImplClasses(Class<M> modelClass){ return modelImplClassesCache.get(modelClass); } public void save() { save(true); } public void save(boolean validate) { if (!isDirty()) { return; } if (validate){ validate(); } beforeSave(); if (record.isNewRecord()) { callExtensions("before.create"); create(); callExtensions("after.create"); } else { callExtensions("before.update"); update(); callExtensions("after.update"); } afterSave(); } public void init(){ } private static final Cache<String,List<FieldValidator<? extends Annotation>>> _fieldValidators = new Cache<String, List<FieldValidator<? extends Annotation>>>() { /** * */ private static final long serialVersionUID = -8174150221673158116L; @Override protected List<FieldValidator<? extends Annotation>> getValue(String pool) { List<FieldValidator<? extends Annotation>> fieldValidators = new ArrayList<FieldValidator<? 
extends Annotation>>(); fieldValidators.add(new ExactLengthValidator(pool)); fieldValidators.add(new MaxLengthValidator(pool)); fieldValidators.add(new MinLengthValidator(pool)); fieldValidators.add(new NotNullValidator(pool)); fieldValidators.add(new RegExValidator(pool)); fieldValidators.add(new EnumerationValidator(pool)); fieldValidators.add(new DateFormatValidator(pool)); fieldValidators.add(new NumericRangeValidator(pool)); fieldValidators.add(new IntegerRangeValidator(pool)); return fieldValidators; } }; private static final List<ModelValidator> modelValidators = new ArrayList<ModelValidator>(); static{ modelValidators.add(new UniqueKeyValidator()); } protected boolean isModelValid(MultiException ex) { List<String> fields = getReflector().getEditableFields(); boolean ret = true; for (String field : fields) { MultiException fieldException = new MultiException(); if (!getReflector().isHouseKeepingField(field) && !isFieldValid(field,fieldException)) { ex.add(fieldException); ret = false; } } if (ret){ for (ModelValidator v : modelValidators){ ret = v.isValid(getProxy(),ex) && ret; } } return ret; } protected boolean isFieldValid(String field, MultiException fieldException) { boolean ret = true; Iterator<FieldValidator<? extends Annotation>> i = _fieldValidators.get(getPool()).iterator(); while (i.hasNext()) { FieldValidator<? extends Annotation> v = i.next(); ret = v.isValid(getProxy(), field, fieldException) && ret; } return ret; } protected void validate(){ beforeValidate(); MultiException me = new MultiException(); if (!isModelValid(me)) { throw me; } afterValidate(); } private <R extends Model> SequenceSet<String> getExtensionPoints(Class<R> modelClass, String extnPointNameSuffix){ SequenceSet<String> extnPoints = new SequenceSet<String>(); ModelReflector<R> ref = ModelReflector.instance(modelClass); for (Class<? extends Model> inHierarchy : ref.getClassHierarchies()){ String extnPoint = inHierarchy.getSimpleName() + "." 
+ extnPointNameSuffix; extnPoints.add(extnPoint); } return extnPoints; } private <R extends Model> void callExtensions(String extnPointNameSuffix){ for (String extnPoint: getExtensionPoints(getReflector().getModelClass(), extnPointNameSuffix)){ Registry.instance().callExtensions(extnPoint, getProxy()); } } protected void beforeValidate(){ defaultFields(); callExtensions("before.validate"); } public void defaultFields(){ if (!record.isNewRecord()){ ColumnDescriptor updatedAt = getReflector().getColumnDescriptor("updated_at"); ColumnDescriptor updatorUser = getReflector().getColumnDescriptor("updater_user_id"); if (!updatedAt.isVirtual()){ proxy.setUpdatedAt(null); } if (!updatorUser.isVirtual()){ proxy.setUpdaterUserId(null); } } ModelReflector<? extends Model> reflector = getReflector(); for (String field:reflector.getRealFields()){ String columnName = reflector.getColumnDescriptor(field).getName(); if (record.get(columnName) == null){ Method fieldGetter = reflector.getFieldGetter(field); COLUMN_DEF cdef = reflector.getAnnotation(fieldGetter,COLUMN_DEF.class); if (cdef != null){ Object defaultValue = StandardDefaulter.getDefaultValue(cdef.value(),cdef.args(),reflector.getTimeZone()); record.put(columnName,defaultValue); } } } } protected void afterValidate(){ callExtensions("after.validate"); } protected void beforeSave() { callExtensions("before.save"); } protected void afterSave() { callExtensions("after.save"); } protected void beforeDestory(){ callExtensions("before.destroy"); } protected void afterDestroy(){ callExtensions("after.destroy"); } public boolean isBeingDestroyed(){ return beingDestroyed; } private boolean beingDestroyed = false; private void destroyCascade(){ ModelReflector<? extends Model> ref = getReflector(); for (Method childrenGetter : ref.getChildGetters()){ Class<? extends Model> childModelClass = ref.getChildModelClass(childrenGetter); ModelReflector<? 
extends Model> childReflector = ModelReflector.instance(childModelClass); List<String> referenceFields = childReflector.getReferenceFields(ref.getModelClass()); for (String referenceField: referenceFields){ try { if (childReflector.getRealModelClass() == null){ continue; } @SuppressWarnings("unchecked") List<Model> children = (List<Model>)childrenGetter.invoke(getProxy()); for (Model child : children){ if (childReflector.isFieldMandatory(referenceField)){ child.destroy(); }else { childReflector.set(child,referenceField,null); child.save(); } } } catch (Exception e) { throw new RuntimeException(e); } } } } public void destroy() { if (isBeingDestroyed()){ return; } try { beingDestroyed = true; beforeDestory(); destroyCascade(); Delete q = new Delete(getReflector()); Expression condition = new Expression(getPool(),Conjunction.AND); condition.add(new Expression(getPool(),getReflector().getColumnDescriptor("id").getName(),Operator.EQ,new BindVariable(getPool(),proxy.getId()))); condition.add(new Expression(getPool(),getReflector().getColumnDescriptor("lock_id").getName(),Operator.EQ,new BindVariable(getPool(),proxy.getLockId()))); q.where(condition); if (q.executeUpdate() <= 0){ throw new RecordNotFoundException(); } Database.getInstance().getCache(getReflector()).registerDestroy((Model)getProxy()); Database.getInstance().getCurrentTransaction().registerTableDataChanged(getReflector().getTableName()); afterDestroy(); }finally{ beingDestroyed = false; } } private void update() { int oldLockId = proxy.getLockId(); int newLockId = oldLockId + 1; Update q = new Update(getReflector()); Iterator<String> fI = record.getDirtyFields().iterator(); while (fI.hasNext()) { String columnName = fI.next(); String fieldName = getReflector().getFieldName(columnName); TypeRef<?> ref = Database.getJdbcTypeHelper(getPool()).getTypeRef(getReflector().getFieldGetter(fieldName).getReturnType()); q.set(columnName,new BindVariable(getPool(),record.get(columnName), ref)); } String idColumn = 
getReflector().getColumnDescriptor("id").getName(); Expression condition = new Expression(getPool(),Conjunction.AND); condition.add(new Expression(getPool(),idColumn,Operator.EQ,new BindVariable(getPool(),proxy.getId()))); ColumnDescriptor lockIdColumDescriptor = getReflector().getColumnDescriptor("lock_id"); if (!lockIdColumDescriptor.isVirtual()){ String lockidColumn = lockIdColumDescriptor.getName(); q.set(lockidColumn,new BindVariable(getPool(),newLockId)); condition.add(new Expression(getPool(),lockidColumn,Operator.EQ,new BindVariable(getPool(),oldLockId))); } q.where(condition); if (q.executeUpdate() <= 0){ throw new RecordNotFoundException(); } proxy.setLockId(newLockId); record.startTracking(); if (!getReflector().isAnnotationPresent(CONFIGURATION.class)){ record.setLocked(true); //Do only for transaction tables as config cache would need to be reset to false after commit. This is just to avoid that unwanted loop over config records cached. } Database.getInstance().getCache(getReflector()).registerUpdate((Model)getProxy()); Database.getInstance().getCurrentTransaction().registerTableDataChanged(getReflector().getTableName()); } private void create() { proxy.setLockId(0); //Table<? 
extends Model> table = Database.getTable(getReflector().getTableName()); Insert insertSQL = new Insert(getReflector()); Map<String,BindVariable> values = new HashMap<String, BindVariable>(); Iterator<String> columnIterator = record.getDirtyFields().iterator(); while (columnIterator.hasNext()) { String columnName = columnIterator.next(); String fieldName = getReflector().getFieldName(columnName); if (fieldName == null){ continue; } TypeRef<?> ref = Database.getJdbcTypeHelper(getPool()).getTypeRef(getReflector().getFieldGetter(fieldName).getReturnType()); values.put(columnName,new BindVariable(getPool(),record.get(columnName), ref)); } insertSQL.values(values); Record generatedValues = new Record(getPool()); Set<String> autoIncrementColumns = getReflector().getAutoIncrementColumns(); assert (autoIncrementColumns.size() <= 1); // atmost one auto increment id column List<String> generatedKeys = new ArrayList<String>(); for (String anAutoIncrementColumn:autoIncrementColumns){ if ( Database.getJdbcTypeHelper(getPool()).isColumnNameAutoLowerCasedInDB() ){ generatedKeys.add(LowerCaseStringCache.instance().get(anAutoIncrementColumn)); }else { generatedKeys.add(anAutoIncrementColumn); } } insertSQL.executeUpdate(generatedValues, generatedKeys.toArray(new String[]{})); if (generatedKeys.size() == 1){ if (generatedValues.getFieldNames().size() == 1){ String virtualFieldName = generatedValues.getFieldNames().iterator().next(); long id = ((Number)generatedValues.get(virtualFieldName)).longValue(); String fieldName = generatedKeys.get(0); record.put(fieldName, id); } } record.setNewRecord(false); record.startTracking(); if (!getReflector().isAnnotationPresent(CONFIGURATION.class)){ record.setLocked(true); } Database.getInstance().getCache(getReflector()).registerInsert((Model)getProxy()); Database.getInstance().getCurrentTransaction().registerTableDataChanged(getReflector().getTableName()); } @Override public boolean equals(Object o){ if (o == null){ return false; } if (!(o 
instanceof ModelInvocationHandler) && !getReflector().canReflect(o)){ return false; } if (o instanceof ModelInvocationHandler){ return equalImpl((ModelInvocationHandler)o); }else { return equalsProxy((Model)o); } } public int hashCode(){ return (getModelName() + ":" + getProxy().getId()).hashCode() ; } protected boolean equalImpl(ModelInvocationHandler anotherImpl){ return (getProxy().getId() == anotherImpl.getProxy().getId()) && getReflector().getTableName().equals(anotherImpl.getReflector().getTableName()); } protected boolean equalsProxy(Model anotherProxy){ boolean ret = false; if (anotherProxy != null){ ret = getProxy().getId() == anotherProxy.getId(); } return ret; } @SuppressWarnings("unchecked") public <M extends Model> M cloneProxy(){ return (M)getRawRecord().clone().getAsProxy(getReflector().getModelClass()); } private transient Map<String,Object> txnProperties = new HashMap<String, Object>(); public Object getTxnProperty(String name) { return txnProperties.get(name); } public void setTxnProperty(String name,Object value) { txnProperties.put(name, value); } public Object removeTxnProperty(String name) { return txnProperties.remove(name); } public boolean isDirty(){ return !getProxy().getRawRecord().getDirtyFields().isEmpty(); } }
swf-db/src/main/java/com/venky/swf/db/table/ModelInvocationHandler.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package com.venky.swf.db.table; import com.venky.cache.Cache; import com.venky.core.collections.IgnoreCaseList; import com.venky.core.collections.LowerCaseStringCache; import com.venky.core.collections.SequenceMap; import com.venky.core.collections.SequenceSet; import com.venky.core.log.SWFLogger; import com.venky.core.log.TimerStatistics.Timer; import com.venky.core.string.StringUtil; import com.venky.core.util.MultiException; import com.venky.core.util.ObjectUtil; import com.venky.extension.Registry; import com.venky.swf.db.Database; import com.venky.swf.db.JdbcTypeHelper.TypeConverter; import com.venky.swf.db.JdbcTypeHelper.TypeRef; import com.venky.swf.db.annotations.column.COLUMN_DEF; import com.venky.swf.db.annotations.column.IS_VIRTUAL; import com.venky.swf.db.annotations.column.defaulting.StandardDefaulter; import com.venky.swf.db.annotations.column.relationship.CONNECTED_VIA; import com.venky.swf.db.annotations.column.validations.processors.*; import com.venky.swf.db.annotations.model.CONFIGURATION; import com.venky.swf.db.annotations.model.validations.ModelValidator; import com.venky.swf.db.annotations.model.validations.UniqueKeyValidator; import com.venky.swf.db.model.Model; import com.venky.swf.db.model.User; import com.venky.swf.db.model.reflection.ModelReflector; import com.venky.swf.db.table.Table.ColumnDescriptor; import com.venky.swf.exceptions.AccessDeniedException; import com.venky.swf.routing.Config; import com.venky.swf.sql.*; import com.venky.swf.sql.parser.SQLExpressionParser; import java.lang.annotation.Annotation; import java.lang.reflect.*; import java.util.*; /** * * @author venky */ public class ModelInvocationHandler implements InvocationHandler { private Record record = null; private Class<? 
extends Model> modelClass = null; private List<String> virtualFields = new IgnoreCaseList(false); private String modelName = null; private transient Model proxy = null; private transient ModelReflector<? extends Model> reflector = null; @SuppressWarnings("unchecked") public <M extends Model> ModelReflector<M> getReflector() { if (reflector == null) { reflector = ModelReflector.instance(modelClass); } return (ModelReflector<M>) reflector; } public String getModelName(){ return modelName; } public String getPool(){ return getReflector().getPool(); } public Class<? extends Model> getModelClass(){ return modelClass; } /** * Used for serialization.: */ protected ModelInvocationHandler() { } public ModelInvocationHandler(Class<? extends Model> modelClass, Record record) { this.record = record; this.modelClass = modelClass; this.reflector = ModelReflector.instance(modelClass); this.modelName = Table.getSimpleModelClassName(reflector.getTableName()); this.virtualFields = reflector.getVirtualFields(); record.startTracking(false); } public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { bootStrapProxy(getModelClass().cast(proxy)); String mName = method.getName(); Class<?> retType = method.getReturnType(); Class<?>[] parameters = method.getParameterTypes(); if (getReflector().getFieldGetterSignatures().contains(getReflector().getSignature(method))) { String fieldName = getReflector().getFieldName(method); if (!virtualFields.contains(fieldName)){ ColumnDescriptor cd = getReflector().getColumnDescriptor(fieldName); String columnName = cd.getName(); Object value = record.get(columnName); TypeRef<?> ref =Database.getJdbcTypeHelper(getPool()).getTypeRef(retType); TypeConverter<?> converter = ref.getTypeConverter(); if (value == null) { COLUMN_DEF colDef = getReflector().getAnnotation(method, COLUMN_DEF.class); if (colDef != null) { value = StandardDefaulter.getDefaultValue(colDef.value(), colDef.args()); } } if (value == null){ if 
(retType.isPrimitive()){ return converter.valueOf(value); }else { return value; } } else if (retType.isInstance(value) && !ref.isLOB()) { return value; } else { return converter.valueOf(value); } } } else if (getReflector().getFieldSetters().contains(method) ) { String fieldName = StringUtil.underscorize(mName.substring(3)); if (!virtualFields.contains(fieldName)){ String columnName = getReflector().getColumnDescriptor(fieldName).getName(); return record.put(columnName, args[0]); } } else if (getReflector().getReferredModelGetters().contains(method)) { if (!getReflector().isAnnotationPresent(method,IS_VIRTUAL.class)){ return getParent(method); } } else if (getReflector().getChildGetters().contains(method)) { if (!getReflector().isAnnotationPresent(method,IS_VIRTUAL.class)){ CONNECTED_VIA join = getReflector().getAnnotation(method,CONNECTED_VIA.class); if (join != null){ return getChildren(getReflector().getChildModelClass(method),join.value(),join.additional_join()); }else { return getChildren(getReflector().getChildModelClass(method)); } } } /* Optimization for (Object impl: modelImplObjects){ try { Method inModelImplClass = impl.getClass().getMethod(mName, parameters); if (retType.isAssignableFrom(inModelImplClass.getReturnType())){ Timer timer = startTimer(inModelImplClass.toString()); try { return inModelImplClass.invoke(impl, args); }catch(InvocationTargetException ex){ throw ex.getCause(); }finally{ timer.stop(); } } }catch(NoSuchMethodException ex){ // } } Method inCurrentClass = this.getClass().getMethod(mName, parameters); if (retType.isAssignableFrom(inCurrentClass.getReturnType())) { try { return inCurrentClass.invoke(this, args); }catch (InvocationTargetException ex){ throw ex.getCause(); } } else { throw new NoSuchMethodException("Donot know how to execute this method"); } */ Class<?> implClass = getMethodImplClass(method); Object implObject = null; if (implClass != null){ implObject = modelImplObjects.get(implClass); } if (implClass == null || 
implObject == null){ //implObject is null while constructing impls. implClass = this.getClass(); implObject = this; } Method inImplClass = implClass.getMethod(mName, parameters); if (retType.isAssignableFrom(inImplClass.getReturnType())) { Timer timer = cat.startTimer(inImplClass.toString()); try { return inImplClass.invoke(implObject, args); }catch (InvocationTargetException ex){ throw ex.getCause(); }finally{ timer.stop(); } }else { throw new NoSuchMethodException("Donot know how to execute " + getReflector().getSignature(method)); } } private transient final SWFLogger cat = Config.instance().getLogger(getClass().getName()+"."+getModelName()); @SuppressWarnings("unchecked") public <P extends Model> P getParent(Method parentGetter) { Class<P> parentClass = (Class<P>) parentGetter.getReturnType(); String parentIdFieldName = StringUtil.underscorize(parentGetter.getName().substring(3) +"Id"); Method parentIdGetter = this.getReflector().getFieldGetter(parentIdFieldName); Number parentId; try { parentId = (Number)parentIdGetter.invoke(proxy); } catch (Exception e) { throw new RuntimeException(parentIdFieldName,e); } P parent = null; if (parentId != null) { parent = Database.getTable(parentClass).get(parentId.longValue()); } return parent; } public <C extends Model> List<C> getChildren(Class<C> childClass){ Class<? 
extends Model> modelClass = getReflector().getModelClass(); ModelReflector<?> childReflector = ModelReflector.instance(childClass); Expression expression = new Expression(childReflector.getPool(),Conjunction.OR); for (String fieldName: childReflector.getFields()){ if (fieldName.endsWith("_ID")){ Method fieldGetter = childReflector.getFieldGetter(fieldName); Method referredModelGetter = childReflector.getReferredModelGetterFor(fieldGetter); if (referredModelGetter != null && ObjectUtil.equals(referredModelGetter.getReturnType().getSimpleName(),modelClass.getSimpleName())){ String columnName = childReflector.getColumnDescriptor(fieldName).getName(); expression.add(new Expression(childReflector.getPool(),columnName,Operator.EQ,proxy.getId())); } } } if (expression.isEmpty()){ throw new RuntimeException("Don;t know how to getChildren of kind " + childClass.getSimpleName() + " for " + modelClass.getSimpleName()); } return getChildren(childClass,expression); } public <C extends Model> List<C> getChildren(Class<C> childClass, String parentIdFieldName){ return getChildren(childClass,parentIdFieldName,null); } public <C extends Model> List<C> getChildren(Class<C> childClass, String parentIdFieldName, String addnl_condition){ long parentId = proxy.getId(); ModelReflector<C> childReflector = ModelReflector.instance(childClass); String parentIdColumnName = childReflector.getColumnDescriptor(parentIdFieldName).getName(); Expression where = new Expression(getPool(),Conjunction.AND); where.add(new Expression(getPool(),parentIdColumnName,Operator.EQ,new BindVariable(getPool(),parentId))); if (!ObjectUtil.isVoid(addnl_condition)){ Expression addnl = new SQLExpressionParser(childClass).parse(addnl_condition); where.add(addnl); } return getChildren(childClass, where); } public <C extends Model> List<C> getChildren(Class<C> childClass, Expression expression){ Select q = new Select(); q.from(childClass); q.where(expression); q.orderBy(ModelReflector.instance(childClass).getOrderBy()); 
return q.execute(childClass); } public <M extends Model> void setProxy(M proxy) { this.proxy = proxy; } @SuppressWarnings("unchecked") public <M extends Model> M getProxy() { return (M)proxy; } public boolean isAccessibleBy(User user){ return isAccessibleBy(user, getReflector().getModelClass()); } public Set<String> getParticipatingRoles(User user){ return getParticipatingRoles(user,getReflector().getModelClass()); } public Set<String> getParticipatingRoles(User user,Class<? extends Model> asModel){ if (!getReflector().reflects(asModel)){ throw new AccessDeniedException(); } return getParticipatingRoles(user, user.getParticipationOptions(asModel,getProxy())); } private Set<String> getParticipatingRoles(User user,Cache<String,Map<String,List<Long>>> pGroupOptions){ Timer timer = cat.startTimer(); try { ModelReflector<? extends Model> reflector = getReflector(); Set<String> participantingRoles = new HashSet<String>(); for (String participantRoleGroup : pGroupOptions.keySet()){ Map<String,List<Long>> pOptions = pGroupOptions.get(participantRoleGroup); for (String referencedModelIdFieldName :pOptions.keySet()){ Number referenceValue = reflector.get(getRawRecord(),referencedModelIdFieldName); if (pOptions.get(referencedModelIdFieldName).contains(referenceValue)){ participantingRoles.add(reflector.getParticipatingRole(referencedModelIdFieldName)); } } if (!pOptions.isEmpty() && participantingRoles.isEmpty()){ throw new AccessDeniedException(); // User is not a participant on the model. } } return participantingRoles; }finally{ timer.stop(); } } public boolean isAccessibleBy(User user,Class<? extends Model> asModel){ Timer timer = cat.startTimer(null,Config.instance().isTimerAdditive()); try { if (!getReflector().reflects(asModel)){ return false; } Set<String> pRoles = getParticipatingRoles(user,asModel); return (pRoles != null);// It is always true. returning false depends on AccessDeniedException being thrown. 
}catch(AccessDeniedException ex){ return false; }finally{ timer.stop(); } } public Record getRawRecord(){ return record; } public static void dispose(){ modelImplClassesCache.clear(); methodImplClassCache.clear(); } public static <M extends Model> M getProxy(Class<M> modelClass, Record record) { ModelReflector<M> ref = ModelReflector.instance(modelClass); try { ModelInvocationHandler mImpl = new ModelInvocationHandler(modelClass, record); M m = modelClass.cast(Proxy.newProxyInstance(modelClass.getClassLoader(), ref.getClassHierarchies().toArray(new Class<?>[]{}), mImpl)); mImpl.bootStrapProxy(m); return m; } catch (Exception e) { throw new RuntimeException(e); } } private <M extends Model> void bootStrapProxy(M m) { if (proxy == null) { setProxy(m); List<Class<?>> modelImplClasses = getModelImplClasses(modelClass); for (Class<?> implClass: modelImplClasses){ addModelImplObject(constructImpl(implClass, m)); } } } @SuppressWarnings("unchecked") private static <M extends Model> Object constructImpl(Class<?> implClass, M m){ if (ModelImpl.class.isAssignableFrom(implClass)){ if (ModelImpl.class.equals(implClass)) { return new ModelImpl<M>(m); }else { ParameterizedType pt = (ParameterizedType)implClass.getGenericSuperclass(); Class<? extends Model> modelClass = (Class<? extends Model>) pt.getActualTypeArguments()[0]; try { return implClass.getConstructor(modelClass).newInstance(m); } catch (Exception e) { throw new RuntimeException(e); } } } throw new RuntimeException("Don't know how to instantiate " + implClass.getName()); } private transient SequenceMap<Class<?>,Object> modelImplObjects = new SequenceMap<Class<?>,Object>(); private void addModelImplObject(Object o){ modelImplObjects.put(o.getClass(),o); } private Class<?> getMethodImplClass(Method m){ return methodImplClassCache.get(getReflector().getModelClass()).get(m); } private static Cache<Class<? extends Model>,Cache<Method,Class<?>>> methodImplClassCache = new Cache<Class<? 
extends Model>, Cache<Method,Class<?>>>() { /** * */ private static final long serialVersionUID = -8303755398345923039L; @Override protected Cache<Method, Class<?>> getValue(final Class<? extends Model> modelClass) { return new Cache<Method, Class<?>>() { /** * */ private static final long serialVersionUID = 1322249489351360016L; @Override protected Class<?> getValue(Method method) { String mName = method.getName(); Class<?> retType = method.getReturnType(); Class<?>[] parameters = method.getParameterTypes(); for (Class<?> implClass: getModelImplClasses(modelClass)){ try { Method inModelImplClass = implClass.getMethod(mName, parameters); if (retType.isAssignableFrom(inModelImplClass.getReturnType())){ return implClass; } }catch (NoSuchMethodException ex){ // } } return null; } }; } }; private static Cache<Class<? extends Model>,List<Class<?>>> modelImplClassesCache = new Cache<Class<? extends Model>, List<Class<?>>>() { /** * */ private static final long serialVersionUID = 7544606584634901930L; @Override protected List<Class<?>> getValue(Class<? extends Model> modelClass) { SequenceSet<Class<? 
extends Model>> modelClasses = ModelReflector.instance(modelClass).getClassHierarchies(); List<Class<?>> modelImplClasses = new ArrayList<Class<?>>(); for (Class<?> c : modelClasses){ String modelImplClassName = c.getName()+"Impl"; try { Class<?> modelImplClass = Class.forName(modelImplClassName); if (ModelImpl.class.isAssignableFrom(modelImplClass)){ modelImplClasses.add(modelImplClass); }else { throw new ClassCastException(modelImplClassName + " does not extend " + ModelImpl.class.getName()); } }catch(ClassNotFoundException ex){ // Nothing } } return modelImplClasses; } }; private static <M extends Model> List<Class<?>> getModelImplClasses(Class<M> modelClass){ return modelImplClassesCache.get(modelClass); } public void save() { save(true); } public void save(boolean validate) { if (!isDirty()) { return; } if (validate){ validate(); } beforeSave(); if (record.isNewRecord()) { callExtensions("before.create"); create(); callExtensions("after.create"); } else { callExtensions("before.update"); update(); callExtensions("after.update"); } afterSave(); } public void init(){ } private static final Cache<String,List<FieldValidator<? extends Annotation>>> _fieldValidators = new Cache<String, List<FieldValidator<? extends Annotation>>>() { /** * */ private static final long serialVersionUID = -8174150221673158116L; @Override protected List<FieldValidator<? extends Annotation>> getValue(String pool) { List<FieldValidator<? extends Annotation>> fieldValidators = new ArrayList<FieldValidator<? 
extends Annotation>>(); fieldValidators.add(new ExactLengthValidator(pool)); fieldValidators.add(new MaxLengthValidator(pool)); fieldValidators.add(new MinLengthValidator(pool)); fieldValidators.add(new NotNullValidator(pool)); fieldValidators.add(new RegExValidator(pool)); fieldValidators.add(new EnumerationValidator(pool)); fieldValidators.add(new DateFormatValidator(pool)); fieldValidators.add(new NumericRangeValidator(pool)); fieldValidators.add(new IntegerRangeValidator(pool)); return fieldValidators; } }; private static final List<ModelValidator> modelValidators = new ArrayList<ModelValidator>(); static{ modelValidators.add(new UniqueKeyValidator()); } protected boolean isModelValid(MultiException ex) { List<String> fields = getReflector().getEditableFields(); boolean ret = true; for (String field : fields) { MultiException fieldException = new MultiException(); if (!getReflector().isHouseKeepingField(field) && !isFieldValid(field,fieldException)) { ex.add(fieldException); ret = false; } } if (ret){ for (ModelValidator v : modelValidators){ ret = v.isValid(getProxy(),ex) && ret; } } return ret; } protected boolean isFieldValid(String field, MultiException fieldException) { boolean ret = true; Iterator<FieldValidator<? extends Annotation>> i = _fieldValidators.get(getPool()).iterator(); while (i.hasNext()) { FieldValidator<? extends Annotation> v = i.next(); ret = v.isValid(getProxy(), field, fieldException) && ret; } return ret; } protected void validate(){ beforeValidate(); MultiException me = new MultiException(); if (!isModelValid(me)) { throw me; } afterValidate(); } private <R extends Model> SequenceSet<String> getExtensionPoints(Class<R> modelClass, String extnPointNameSuffix){ SequenceSet<String> extnPoints = new SequenceSet<String>(); ModelReflector<R> ref = ModelReflector.instance(modelClass); for (Class<? extends Model> inHierarchy : ref.getClassHierarchies()){ String extnPoint = inHierarchy.getSimpleName() + "." 
+ extnPointNameSuffix; extnPoints.add(extnPoint); } return extnPoints; } private <R extends Model> void callExtensions(String extnPointNameSuffix){ for (String extnPoint: getExtensionPoints(getReflector().getModelClass(), extnPointNameSuffix)){ Registry.instance().callExtensions(extnPoint, getProxy()); } } protected void beforeValidate(){ defaultFields(); callExtensions("before.validate"); } public void defaultFields(){ if (!record.isNewRecord()){ ColumnDescriptor updatedAt = getReflector().getColumnDescriptor("updated_at"); ColumnDescriptor updatorUser = getReflector().getColumnDescriptor("updater_user_id"); if (!updatedAt.isVirtual()){ proxy.setUpdatedAt(null); } if (!updatorUser.isVirtual()){ proxy.setUpdaterUserId(null); } } ModelReflector<? extends Model> reflector = getReflector(); for (String field:reflector.getRealFields()){ String columnName = reflector.getColumnDescriptor(field).getName(); if (record.get(columnName) == null){ Method fieldGetter = reflector.getFieldGetter(field); COLUMN_DEF cdef = reflector.getAnnotation(fieldGetter,COLUMN_DEF.class); if (cdef != null){ Object defaultValue = StandardDefaulter.getDefaultValue(cdef.value(),cdef.args(),reflector.getTimeZone()); record.put(columnName,defaultValue); } } } } protected void afterValidate(){ callExtensions("after.validate"); } protected void beforeSave() { callExtensions("before.save"); } protected void afterSave() { callExtensions("after.save"); } protected void beforeDestory(){ callExtensions("before.destroy"); } protected void afterDestroy(){ callExtensions("after.destroy"); } public boolean isBeingDestroyed(){ return beingDestroyed; } private boolean beingDestroyed = false; private void destroyCascade(){ ModelReflector<? extends Model> ref = getReflector(); for (Method childrenGetter : ref.getChildGetters()){ Class<? extends Model> childModelClass = ref.getChildModelClass(childrenGetter); ModelReflector<? 
extends Model> childReflector = ModelReflector.instance(childModelClass); List<String> referenceFields = childReflector.getReferenceFields(ref.getModelClass()); for (String referenceField: referenceFields){ try { if (childReflector.getRealModelClass() == null){ continue; } @SuppressWarnings("unchecked") List<Model> children = (List<Model>)childrenGetter.invoke(getProxy()); for (Model child : children){ if (childReflector.isFieldMandatory(referenceField)){ child.destroy(); }else { childReflector.set(child,referenceField,null); child.save(); } } } catch (Exception e) { throw new RuntimeException(e); } } } } public void destroy() { if (isBeingDestroyed()){ return; } try { beingDestroyed = true; beforeDestory(); destroyCascade(); Delete q = new Delete(getReflector()); Expression condition = new Expression(getPool(),Conjunction.AND); condition.add(new Expression(getPool(),getReflector().getColumnDescriptor("id").getName(),Operator.EQ,new BindVariable(getPool(),proxy.getId()))); condition.add(new Expression(getPool(),getReflector().getColumnDescriptor("lock_id").getName(),Operator.EQ,new BindVariable(getPool(),proxy.getLockId()))); q.where(condition); if (q.executeUpdate() <= 0){ throw new RecordNotFoundException(); } Database.getInstance().getCache(getReflector()).registerDestroy((Model)getProxy()); Database.getInstance().getCurrentTransaction().registerTableDataChanged(getReflector().getTableName()); afterDestroy(); }finally{ beingDestroyed = false; } } private void update() { int oldLockId = proxy.getLockId(); int newLockId = oldLockId + 1; Update q = new Update(getReflector()); Iterator<String> fI = record.getDirtyFields().iterator(); while (fI.hasNext()) { String columnName = fI.next(); String fieldName = getReflector().getFieldName(columnName); TypeRef<?> ref = Database.getJdbcTypeHelper(getPool()).getTypeRef(getReflector().getFieldGetter(fieldName).getReturnType()); q.set(columnName,new BindVariable(getPool(),record.get(columnName), ref)); } String idColumn = 
getReflector().getColumnDescriptor("id").getName(); Expression condition = new Expression(getPool(),Conjunction.AND); condition.add(new Expression(getPool(),idColumn,Operator.EQ,new BindVariable(getPool(),proxy.getId()))); ColumnDescriptor lockIdColumDescriptor = getReflector().getColumnDescriptor("lock_id"); if (!lockIdColumDescriptor.isVirtual()){ String lockidColumn = lockIdColumDescriptor.getName(); q.set(lockidColumn,new BindVariable(getPool(),newLockId)); condition.add(new Expression(getPool(),lockidColumn,Operator.EQ,new BindVariable(getPool(),oldLockId))); } q.where(condition); if (q.executeUpdate() <= 0){ throw new RecordNotFoundException(); } proxy.setLockId(newLockId); record.startTracking(); if (!getReflector().isAnnotationPresent(CONFIGURATION.class)){ record.setLocked(true); //Do only for transaction tables as config cache would need to be reset to false after commit. This is just to avoid that unwanted loop over config records cached. } Database.getInstance().getCache(getReflector()).registerUpdate((Model)getProxy()); Database.getInstance().getCurrentTransaction().registerTableDataChanged(getReflector().getTableName()); } private void create() { proxy.setLockId(0); //Table<? 
extends Model> table = Database.getTable(getReflector().getTableName()); Insert insertSQL = new Insert(getReflector()); Map<String,BindVariable> values = new HashMap<String, BindVariable>(); Iterator<String> columnIterator = record.getDirtyFields().iterator(); while (columnIterator.hasNext()) { String columnName = columnIterator.next(); String fieldName = getReflector().getFieldName(columnName); if (fieldName == null){ continue; } TypeRef<?> ref = Database.getJdbcTypeHelper(getPool()).getTypeRef(getReflector().getFieldGetter(fieldName).getReturnType()); values.put(columnName,new BindVariable(getPool(),record.get(columnName), ref)); } insertSQL.values(values); Record generatedValues = new Record(getPool()); Set<String> autoIncrementColumns = getReflector().getAutoIncrementColumns(); assert (autoIncrementColumns.size() <= 1); // atmost one auto increment id column List<String> generatedKeys = new ArrayList<String>(); for (String anAutoIncrementColumn:autoIncrementColumns){ if ( Database.getJdbcTypeHelper(getPool()).isColumnNameAutoLowerCasedInDB() ){ generatedKeys.add(LowerCaseStringCache.instance().get(anAutoIncrementColumn)); }else { generatedKeys.add(anAutoIncrementColumn); } } insertSQL.executeUpdate(generatedValues, generatedKeys.toArray(new String[]{})); if (generatedKeys.size() == 1){ if (generatedValues.getFieldNames().size() == 1){ String virtualFieldName = generatedValues.getFieldNames().iterator().next(); long id = ((Number)generatedValues.get(virtualFieldName)).longValue(); String fieldName = generatedKeys.get(0); record.put(fieldName, id); } } record.setNewRecord(false); record.startTracking(); if (!getReflector().isAnnotationPresent(CONFIGURATION.class)){ record.setLocked(true); } Database.getInstance().getCache(getReflector()).registerInsert((Model)getProxy()); Database.getInstance().getCurrentTransaction().registerTableDataChanged(getReflector().getTableName()); } @Override public boolean equals(Object o){ if (o == null){ return false; } if (!(o 
instanceof ModelInvocationHandler) && !getReflector().canReflect(o)){ return false; } if (o instanceof ModelInvocationHandler){ return equalImpl((ModelInvocationHandler)o); }else { return equalsProxy((Model)o); } } public int hashCode(){ return (getModelName() + ":" + getProxy().getId()).hashCode() ; } protected boolean equalImpl(ModelInvocationHandler anotherImpl){ return (getProxy().getId() == anotherImpl.getProxy().getId()) && getReflector().getTableName().equals(anotherImpl.getReflector().getTableName()); } protected boolean equalsProxy(Model anotherProxy){ boolean ret = false; if (anotherProxy != null){ ret = getProxy().getId() == anotherProxy.getId(); } return ret; } @SuppressWarnings("unchecked") public <M extends Model> M cloneProxy(){ return (M)getRawRecord().clone().getAsProxy(getReflector().getModelClass()); } private transient Map<String,Object> txnProperties = new HashMap<String, Object>(); public Object getTxnProperty(String name) { return txnProperties.get(name); } public void setTxnProperty(String name,Object value) { txnProperties.put(name, value); } public Object removeTxnProperty(String name) { return txnProperties.remove(name); } public boolean isDirty(){ return !getProxy().getRawRecord().getDirtyFields().isEmpty(); } }
Redundant participating columns should notbe enforced for Access Control
swf-db/src/main/java/com/venky/swf/db/table/ModelInvocationHandler.java
Redundant participating columns should notbe enforced for Access Control
<ide><path>wf-db/src/main/java/com/venky/swf/db/table/ModelInvocationHandler.java <ide> import com.venky.swf.db.annotations.column.COLUMN_DEF; <ide> import com.venky.swf.db.annotations.column.IS_VIRTUAL; <ide> import com.venky.swf.db.annotations.column.defaulting.StandardDefaulter; <add>import com.venky.swf.db.annotations.column.pm.PARTICIPANT; <ide> import com.venky.swf.db.annotations.column.relationship.CONNECTED_VIA; <ide> import com.venky.swf.db.annotations.column.validations.processors.*; <ide> import com.venky.swf.db.annotations.model.CONFIGURATION; <ide> Map<String,List<Long>> pOptions = pGroupOptions.get(participantRoleGroup); <ide> for (String referencedModelIdFieldName :pOptions.keySet()){ <ide> Number referenceValue = reflector.get(getRawRecord(),referencedModelIdFieldName); <add> <add> PARTICIPANT participant = reflector.getAnnotation(reflector.getFieldGetter(referencedModelIdFieldName), PARTICIPANT.class); <ide> <del> if (pOptions.get(referencedModelIdFieldName).contains(referenceValue)){ <add> if (participant.redundant() || pOptions.get(referencedModelIdFieldName).contains(referenceValue)){ <ide> participantingRoles.add(reflector.getParticipatingRole(referencedModelIdFieldName)); <ide> } <ide> }
Java
apache-2.0
8a5db78b30d65584e7ade269fb81890799745274
0
muntasirsyed/intellij-community,youdonghai/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,asedunov/intellij-community,izonder/intellij-community,ahb0327/intellij-community,semonte/intellij-community,retomerz/intellij-community,supersven/intellij-community,ol-loginov/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,nicolargo/intellij-community,ryano144/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,vladmm/intellij-community,ahb0327/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,fnouama/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,salguarnieri/intellij-community,clumsy/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,ivan-fedorov/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,slisson/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,signed/intellij-community,robovm/robovm-studio,samthor/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,izonder/intellij-community,ibinti/intellij-community,semon
te/intellij-community,FHannes/intellij-community,diorcety/intellij-community,blademainer/intellij-community,fnouama/intellij-community,FHannes/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-community,consulo/consulo,Lekanich/intellij-community,supersven/intellij-community,da1z/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,TangHao1987/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community,Distrotech/intellij-community,kool79/intellij-community,petteyg/intellij-community,xfournet/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,joewalnes/idea-community,ibinti/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,consulo/consulo,retomerz/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,vvv1559/intellij-community,allotria/intellij-community,FHannes/intellij-community,blademainer/intellij-community,samthor/intellij-community,vvv1559/intellij-community,slisson/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,slisson/intellij-community,FHannes/intellij-community,slisson/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,da1z/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,xfournet/intelli
j-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,apixandru/intellij-community,kool79/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,retomerz/intellij-community,clumsy/intellij-community,signed/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,izonder/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,ibinti/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,kool79/intellij-community,robovm/robovm-studio,alphafoobar/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,SerCeMan/intellij-community,slisson/intellij-community,allotria/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,fnouama/intellij-community,allotria/intellij-community,amith01994/intellij-community,signed/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,allotria/intellij-community,petteyg/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,FHannes/intellij-community,vladmm/intellij-community,ibinti/intellij-community,samthor/intellij-community,consulo/con
sulo,vvv1559/intellij-community,supersven/intellij-community,fitermay/intellij-community,adedayo/intellij-community,holmes/intellij-community,blademainer/intellij-community,kool79/intellij-community,ftomassetti/intellij-community,kool79/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,retomerz/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,signed/intellij-community,lucafavatella/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,petteyg/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,ernestp/consulo,amith01994/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,amith01994/intellij-community,holmes/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,caot/intellij-community,hurricup/intellij-community,signed/intellij-community,consulo/consulo,michaelgallacher/intellij-community,slisson/intellij-community,asedunov/intellij-community,holmes/intellij-community,amith01994/intellij-community,da1z/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,ftomassetti/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,petteyg/intellij-community,ryano144/intellij-community,TangHao1987/intellij-community,akosyakov/intellij-community,MichaelNedzelsky/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,petteyg/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,am
ith01994/intellij-community,slisson/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,gnuhub/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,kool79/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,diorcety/intellij-community,ryano144/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,dslomov/intellij-community,semonte/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,supersven/intellij-community,allotria/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,ernestp/consulo,dslomov/intellij-community,allotria/intellij-community,semonte/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,FHannes/intellij-community,caot/intellij-community,vladmm/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,adedayo/intellij-community,asedunov/intellij-community,apixandru/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-comm
unity,apixandru/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,consulo/consulo,asedunov/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,dslomov/intellij-community,adedayo/intellij-community,holmes/intellij-community,Lekanich/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,retomerz/intellij-community,caot/intellij-community,dslomov/intellij-community,joewalnes/idea-community,semonte/intellij-community,TangHao1987/intellij-community,caot/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,robovm/robovm-studio,akosyakov/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,joewalnes/idea-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,Distrotech/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,ibinti/intellij-community,slisson/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,TangHao1987/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,Distrotech/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,dslomov/intellij-community,nicolargo/intellij-community,holmes/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,supersven/intellij-community,signed/intellij-community,mglukhikh/intellij-community,muntasirsyed/intel
lij-community,gnuhub/intellij-community,kool79/intellij-community,blademainer/intellij-community,signed/intellij-community,alphafoobar/intellij-community,signed/intellij-community,ftomassetti/intellij-community,ibinti/intellij-community,izonder/intellij-community,allotria/intellij-community,izonder/intellij-community,caot/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,blademainer/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,idea4bsd/idea4bsd,caot/intellij-community,clumsy/intellij-community,jagguli/intellij-community,ibinti/intellij-community,dslomov/intellij-community,ftomassetti/intellij-community,lucafavatella/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,kdwink/intellij-community,youdonghai/intellij-community,petteyg/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,holmes/intellij-community,idea4bsd/idea4bsd,ased
unov/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,fnouama/intellij-community,ernestp/consulo,youdonghai/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,joewalnes/idea-community,MichaelNedzelsky/intellij-community,ernestp/consulo,orekyuu/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,holmes/intellij-community,semonte/intellij-community,dslomov/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,samthor/intellij-community,izonder/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,asedunov/intellij-community,izonder/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,joewalnes/idea-community,hurricup/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,caot/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,akosyakov/intellij-community,ernestp/consulo,suncycheng/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,joewalnes/idea-community,caot/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/int
ellij-community,joewalnes/idea-community,wreckJ/intellij-community,diorcety/intellij-community,MER-GROUP/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,caot/intellij-community,ryano144/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,signed/intellij-community,amith01994/intellij-community,retomerz/intellij-community,signed/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,amith01994/intellij-community,robovm/robovm-studio,kool79/intellij-community,adedayo/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,joewalnes/idea-community,xfournet/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,hurricup/intellij-community,kool79/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,da1z/intellij-community,fitermay/intellij-community,robovm/robovm-studio,kdwink/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,dslomov/intellij-community,caot/intellij-community,caot/intellij-community,wreckJ/intellij-community,orekyuu/intellij-community,supersven/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,blademainer/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,samthor/intellij-community,ernestp/consulo,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,kdwink/intellij-community,vladmm/intellij-community,ivan-fed
orov/intellij-community,gnuhub/intellij-community,retomerz/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,izonder/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,amith01994/intellij-community,gnuhub/intellij-community,SerCeMan/intellij-community,adedayo/intellij-community,holmes/intellij-community,ahb0327/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,nicolargo/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,caot/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,izonder/intellij-community,semonte/intellij-community,dslomov/intellij-community,clumsy/intellij-community,clumsy/intellij-community,hurricup/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,ryano144/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,da1z/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,fengbaicanhe/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,fnouama/intellij-community,MichaelNedzelsky/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,hurricup/intellij-community,vladmm/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,asedunov/intellij-community,diorcety/intellij-community,kool79/intellij-community,fnouama/intellij-community,fitermay/intellij-community,diorcety/intellij-community,ryano144
/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,semonte/intellij-community,holmes/intellij-community,signed/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,samthor/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,jagguli/intellij-community,blademainer/intellij-community,consulo/consulo,kdwink/intellij-community,MichaelNedzelsky/intellij-community,wreckJ/intellij-community,nicolargo/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,supersven/intellij-community,semonte/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,jagguli/intellij-community,kool79/intellij-community,Lekanich/intellij-community,da1z/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,semonte/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,samthor/intellij-community,adedayo/intellij-community,FHannes/intellij-community,robovm/robovm-studio,alphafoobar/intellij-community,apixandru/intellij-community,robovm/robovm-studio,semonte/intellij-community,allotria/intellij-community,fnouama/intellij-community,orekyuu/intellij-community,salguarnieri/intellij-community,joewalnes/idea-community
/* * Copyright 2000-2010 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.debugger; import com.intellij.debugger.engine.evaluation.CodeFragmentKind; import com.intellij.debugger.engine.evaluation.TextWithImports; import com.intellij.debugger.engine.evaluation.TextWithImportsImpl; import com.intellij.debugger.impl.EditorTextProvider; import com.intellij.psi.PsiClass; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiEnumConstant; import com.intellij.psi.PsiVariable; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable; import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrCall; import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression; import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrThisReferenceExpression; /** * @author Maxim.Medvedev */ public class GroovyEditorTextProvider implements EditorTextProvider { @Override public TextWithImports getEditorText(PsiElement elementAtCaret) { String result = ""; PsiElement element = findExpression(elementAtCaret); if (element != null) { if (element instanceof GrReferenceExpression) { final GrReferenceExpression reference = (GrReferenceExpression)element; if (reference.getQualifier() == null) { final PsiElement resolved = reference.resolve(); if (resolved instanceof PsiEnumConstant) { final PsiEnumConstant enumConstant = 
(PsiEnumConstant)resolved; final PsiClass enumClass = enumConstant.getContainingClass(); if (enumClass != null) { result = enumClass.getName() + "." + enumConstant.getName(); } } } } if (result.length() == 0) { result = element.getText(); } } return new TextWithImportsImpl(CodeFragmentKind.EXPRESSION, result); } @Nullable private static PsiElement findExpression(PsiElement element) { PsiElement parent = element.getParent(); if (parent instanceof GrVariable && element == ((GrVariable)parent).getNameIdentifierGroovy()) { return element; } if (parent instanceof GrReferenceExpression) { if (parent.getParent() instanceof GrCall) return parent.getParent(); return parent; } if (parent instanceof GrThisReferenceExpression) { return parent; } return null; } }
plugins/groovy/src/org/jetbrains/plugins/groovy/debugger/GroovyEditorTextProvider.java
/* * Copyright 2000-2010 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.plugins.groovy.debugger; import com.intellij.debugger.engine.evaluation.CodeFragmentKind; import com.intellij.debugger.engine.evaluation.TextWithImports; import com.intellij.debugger.engine.evaluation.TextWithImportsImpl; import com.intellij.debugger.impl.EditorTextProvider; import com.intellij.psi.PsiClass; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiEnumConstant; import com.intellij.psi.PsiVariable; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrCall; import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression; import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrThisReferenceExpression; /** * @author Maxim.Medvedev */ public class GroovyEditorTextProvider implements EditorTextProvider { @Override public TextWithImports getEditorText(PsiElement elementAtCaret) { String result = ""; PsiElement element = findExpression(elementAtCaret); if (element != null) { if (element instanceof GrReferenceExpression) { final GrReferenceExpression reference = (GrReferenceExpression)element; if (reference.getQualifier() == null) { final PsiElement resolved = reference.resolve(); if (resolved instanceof PsiEnumConstant) { final PsiEnumConstant enumConstant = (PsiEnumConstant)resolved; final PsiClass enumClass = 
enumConstant.getContainingClass(); if (enumClass != null) { result = enumClass.getName() + "." + enumConstant.getName(); } } } } if (result.length() == 0) { result = element.getText(); } } return new TextWithImportsImpl(CodeFragmentKind.EXPRESSION, result); } @Nullable private static PsiElement findExpression(PsiElement element) { PsiElement parent = element.getParent(); if (parent instanceof PsiVariable && element == ((PsiVariable)parent).getNameIdentifier()) { return element; } if (parent instanceof GrReferenceExpression) { if (parent.getParent() instanceof GrCall) return parent.getParent(); return parent; } if (parent instanceof GrThisReferenceExpression) { return parent; } return null; } }
IDEA-59779 evaluate expression with caret on groovy variable declaration
plugins/groovy/src/org/jetbrains/plugins/groovy/debugger/GroovyEditorTextProvider.java
IDEA-59779 evaluate expression with caret on groovy variable declaration
<ide><path>lugins/groovy/src/org/jetbrains/plugins/groovy/debugger/GroovyEditorTextProvider.java <ide> import com.intellij.psi.PsiEnumConstant; <ide> import com.intellij.psi.PsiVariable; <ide> import org.jetbrains.annotations.Nullable; <add>import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable; <ide> import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrCall; <ide> import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrReferenceExpression; <ide> import org.jetbrains.plugins.groovy.lang.psi.api.statements.expressions.GrThisReferenceExpression; <ide> @Nullable <ide> private static PsiElement findExpression(PsiElement element) { <ide> PsiElement parent = element.getParent(); <del> if (parent instanceof PsiVariable && element == ((PsiVariable)parent).getNameIdentifier()) { <add> if (parent instanceof GrVariable && element == ((GrVariable)parent).getNameIdentifierGroovy()) { <ide> return element; <ide> } <ide> if (parent instanceof GrReferenceExpression) {
Java
apache-2.0
469f9eea81d783644869c355b5c4af315ac39c2c
0
apache/uima-sandbox,apache/uima-sandbox,apache/uima-sandbox
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.aae.handler.input; import org.apache.uima.UIMAFramework; import org.apache.uima.aae.UIMAEE_Constants; import org.apache.uima.aae.UimaSerializer; import org.apache.uima.aae.InProcessCache.CacheEntry; import org.apache.uima.aae.controller.AggregateAnalysisEngineController; import org.apache.uima.aae.controller.Endpoint; import org.apache.uima.aae.controller.Endpoint_impl; import org.apache.uima.aae.controller.PrimitiveAnalysisEngineController; import org.apache.uima.aae.controller.LocalCache.CasStateEntry; import org.apache.uima.aae.error.AsynchAEException; import org.apache.uima.aae.error.ErrorContext; import org.apache.uima.aae.error.InvalidMessageException; import org.apache.uima.aae.handler.HandlerBase; import org.apache.uima.aae.jmx.ServicePerformance; import org.apache.uima.aae.message.AsynchAEMessage; import org.apache.uima.aae.message.MessageContext; import org.apache.uima.aae.monitor.Monitor; import org.apache.uima.aae.monitor.statistics.DelegateStats; import org.apache.uima.aae.monitor.statistics.LongNumericStatistic; import org.apache.uima.aae.monitor.statistics.TimerStats; import org.apache.uima.analysis_engine.asb.impl.FlowContainer; import 
org.apache.uima.cas.CAS; import org.apache.uima.cas.Marker; import org.apache.uima.cas.impl.XmiSerializationSharedData; import org.apache.uima.util.Level; public class ProcessRequestHandler_impl extends HandlerBase { private static final Class CLASS_NAME = ProcessRequestHandler_impl.class; private Object mux = new Object(); private UimaSerializer uimaSerializer = new UimaSerializer(); public ProcessRequestHandler_impl(String aName) { super(aName); } private void cacheStats(String aCasReferenceId, long aTimeWaitingForCAS, long aTimeToDeserializeCAS ) throws Exception { CacheEntry entry = getController().getInProcessCache().getCacheEntryForCAS(aCasReferenceId); entry.incrementTimeWaitingForCAS( aTimeWaitingForCAS); entry.incrementTimeToDeserializeCAS(aTimeToDeserializeCAS); } private boolean messageContainsXMI(MessageContext aMessageContext, String casReferenceId) throws Exception { // Fetch serialized CAS from the message String xmi = aMessageContext.getStringMessage(); // ***************************************************************** // ***** NO XMI In Message. Kick this back to sender with exception // ***************************************************************** if ( xmi == null ) { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithXMI", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_message_has_no_cargo__INFO", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } getController(). getOutputChannel(). 
sendReply(new InvalidMessageException("No XMI data in message"), casReferenceId, aMessageContext.getEndpoint(),AsynchAEMessage.Process); // Dont process this empty message return false; } return true; } private synchronized CAS getCAS( boolean fetchCASFromShadowCasPool, String shadowCasPoolKey, String casReceivedFrom ) { CAS cas = null; // If this is a new CAS (generated by a CM), fetch a CAS from a Shadow Cas Pool associated with a CM that // produced the CAS. Each CM will have its own Shadow Cas Pool if ( fetchCASFromShadowCasPool ) { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINEST)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINEST, CLASS_NAME.getName(), "getCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_request_cas_cm__FINE", new Object[] { shadowCasPoolKey }); } // Aggregate time spent waiting for a CAS in the shadow cas pool ((AggregateAnalysisEngineController)getController()).getDelegateServicePerformance(shadowCasPoolKey).beginWaitOnShadowCASPool(); cas = getController().getCasManagerWrapper().getNewCas(shadowCasPoolKey); ((AggregateAnalysisEngineController)getController()).getDelegateServicePerformance(shadowCasPoolKey).endWaitOnShadowCASPool(); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "getCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_request_cas_granted_cm__FINE", new Object[] { shadowCasPoolKey }); } } else { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINEST)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINEST, CLASS_NAME.getName(), "getCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_request_cas__FINE", new Object[] { casReceivedFrom }); } // Aggregate time spent waiting for a CAS in the service cas pool getController().getServicePerformance().beginWaitOnCASPool(); cas = getController().getCasManagerWrapper().getNewCas(); getController().getServicePerformance().endWaitOnCASPool(); if 
(UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "getCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_request_cas_granted__FINE", new Object[] { casReceivedFrom }); } } return cas; } /** * * @param casReferenceId * @param freeCasEndpoint * @param shadowCasPoolKey * @param aMessageContext * @return * @throws Exception */ private CacheEntry deserializeCASandRegisterWithCache( String casReferenceId, Endpoint freeCasEndpoint, String shadowCasPoolKey, MessageContext aMessageContext) throws Exception { long inTime = System.nanoTime(); boolean casRegistered = false; // Time how long we wait on Cas Pool to fetch a new CAS long t1 = getController().getCpuTime(); // ************************************************************************* // Fetch CAS from a Cas Pool. If the CAS came from a Cas Multiplier // fetch the CAS from a shadow CAS pool. Otherwise, fetch the CAS // from the service CAS Pool. // ************************************************************************* Endpoint endpoint = aMessageContext.getEndpoint(); CAS cas = getCAS(aMessageContext.propertyExists(AsynchAEMessage.CasSequence), shadowCasPoolKey,endpoint.getEndpoint()); long timeWaitingForCAS = getController().getCpuTime() - t1; // Check if we are still running if ( getController().isStopped() ) { // The Controller is in shutdown state. 
getController().dropCAS(cas); return null; } // ************************************************************************* // Deserialize CAS from the message // ************************************************************************* t1 = getController().getCpuTime(); String serializationStrategy = endpoint.getSerializer(); XmiSerializationSharedData deserSharedData = null; CacheEntry entry = null; if ( serializationStrategy.equals("xmi")) { // Fetch serialized CAS from the message String xmi = aMessageContext.getStringMessage(); deserSharedData = new XmiSerializationSharedData(); // UimaSerializer.deserializeCasFromXmi(xmi, cas, deserSharedData, true, -1); uimaSerializer.deserializeCasFromXmi(xmi, cas, deserSharedData, true, -1); } else if ( serializationStrategy.equals("binary")) { // ************************************************************************* // Register the CAS with a local cache // ************************************************************************* //CacheEntry entry = getController().getInProcessCache().register(cas, aMessageContext, deserSharedData, casReferenceId); byte[] binarySource = aMessageContext.getByteMessage(); // UimaSerializer.deserializeCasFromBinary(binarySource, cas); uimaSerializer.deserializeCasFromBinary(binarySource, cas); } // ************************************************************************* // Check and set up for Delta CAS reply // ************************************************************************* boolean acceptsDeltaCas = false; Marker marker = null; if (aMessageContext.propertyExists(AsynchAEMessage.AcceptsDeltaCas)) { acceptsDeltaCas = aMessageContext.getMessageBooleanProperty(AsynchAEMessage.AcceptsDeltaCas); if (acceptsDeltaCas ) { marker = cas.createMarker(); } } // ************************************************************************* // Register the CAS with a local cache // ************************************************************************* //CacheEntry entry = 
getController().getInProcessCache().register(cas, aMessageContext, deserSharedData, casReferenceId); entry = getController().getInProcessCache().register(cas, aMessageContext, deserSharedData, casReferenceId, marker, acceptsDeltaCas); long timeToDeserializeCAS = getController().getCpuTime() - t1; getController().incrementDeserializationTime(timeToDeserializeCAS); LongNumericStatistic statistic; if ( (statistic = getController().getMonitor().getLongNumericStatistic("",Monitor.TotalDeserializeTime)) != null ) { statistic.increment(timeToDeserializeCAS); } if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithXMI", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_deserialize_cas_time_FINE", new Object[] { (double)timeToDeserializeCAS / 1000000.0}); } // Update Stats ServicePerformance casStats = getController().getCasStatistics(casReferenceId); casStats.incrementCasDeserializationTime(timeToDeserializeCAS); if ( getController().isTopLevelComponent() ) { synchronized( mux ) { getController().getServicePerformance().incrementCasDeserializationTime(timeToDeserializeCAS); } } getController().saveTime(inTime, casReferenceId, getController().getName()); if ( getController() instanceof AggregateAnalysisEngineController ) { // If the message came from a Cas Multiplier, associate the input/parent CAS id with this CAS if ( aMessageContext.propertyExists(AsynchAEMessage.CasSequence) ) { // Fetch parent CAS id String inputCasReferenceId = aMessageContext.getMessageStringProperty(AsynchAEMessage.InputCasReference); if ( shadowCasPoolKey != null ) { // Save the key of the Cas Multiplier in the cache. 
It will be now known which Cas Multiplier produced this CAS entry.setCasMultiplierKey(shadowCasPoolKey); } // associate this subordinate CAS with the parent CAS entry.setInputCasReferenceId(inputCasReferenceId); // Save a Cas Multiplier endpoint where a Free CAS notification will be sent entry.setFreeCasEndpoint(freeCasEndpoint); cacheStats( inputCasReferenceId, timeWaitingForCAS, timeToDeserializeCAS); } else { cacheStats( casReferenceId, timeWaitingForCAS, timeToDeserializeCAS); } DelegateStats stats = new DelegateStats(); if ( entry.getStat() == null ) { entry.setStat(stats); // Add entry for self (this aggregate). MessageContext.getEndpointName() // returns the name of the queue receiving the message. stats.put(getController().getServiceEndpointName(), new TimerStats()); } else { if (!stats.containsKey(getController().getServiceEndpointName())) { stats.put(getController().getServiceEndpointName(), new DelegateStats()); } } } else { cacheStats( casReferenceId, timeWaitingForCAS, timeToDeserializeCAS); } if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithXMI", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_deserialized_cas_ready_to_process_FINE", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } cacheProcessCommandInClientEndpoint(); return entry; } private String getCasReferenceId( MessageContext aMessageContext ) throws Exception { if ( !aMessageContext.propertyExists(AsynchAEMessage.CasReference) ) { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_message_has_cas_refid__INFO", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } getController(). getOutputChannel(). 
sendReply(new InvalidMessageException("No Cas Reference Id Received From Delegate In message"), null, aMessageContext.getEndpoint(),AsynchAEMessage.Process); return null; } return aMessageContext.getMessageStringProperty(AsynchAEMessage.CasReference); } /** * Handles process request from a remote client * * @param aMessageContext - contains a message from UIMA-AS Client * @throws AsynchAEException */ private void handleProcessRequestFromRemoteDelegate(MessageContext aMessageContext) throws AsynchAEException { CacheEntry entry = null; String casReferenceId = null; // Check if there is a cargo in the message if ( aMessageContext.getMessageIntProperty(AsynchAEMessage.Payload)== AsynchAEMessage.XMIPayload && aMessageContext.getStringMessage() == null ) { return; // No XMI just return } try { String newCASProducedBy = null; // Get the CAS Reference Id of the input CAS // Fetch id of the CAS from the message. If it doesnt exist the method will create an entry in the log file and return null casReferenceId = getCasReferenceId(aMessageContext); if ( casReferenceId == null ) { return; // Invalid message. Nothing to do } // Initially make both equal String inputCasReferenceId = casReferenceId; // Destination where Free Cas Notification will be sent if the CAS came from a Cas Multiplier Endpoint freeCasEndpoint = null; // CASes generated by a Cas Multiplier will have a CasSequence property set. if ( aMessageContext.propertyExists(AsynchAEMessage.CasSequence) ) { // Fetch an ID of the parent CAS inputCasReferenceId = aMessageContext.getMessageStringProperty(AsynchAEMessage.InputCasReference); // Fetch Cache entry for the parent CAS CacheEntry inputCasCacheEntry = getController().getInProcessCache().getCacheEntryForCAS(inputCasReferenceId); CasStateEntry casStateEntry = null; if ( getController() instanceof AggregateAnalysisEngineController ) { casStateEntry = ((AggregateAnalysisEngineController)getController()). 
getLocalCache().lookupEntry(inputCasReferenceId); casStateEntry.incrementSubordinateCasInPlayCount(); } computeStats(aMessageContext, inputCasReferenceId); // Fetch an endpoint where Free CAS Notification must be sent. // This endpoint is unique per CM instance. Meaning, each // instance of CM will have an endpoint where it expects Free CAS // notifications. freeCasEndpoint = aMessageContext.getEndpoint(); // Clone an endpoint where Free Cas Request will be sent freeCasEndpoint = (Endpoint)((Endpoint_impl)freeCasEndpoint).clone(); // Reset the destination aMessageContext.getEndpoint().setDestination(null); // This CAS came in from a CAS Multiplier. Treat it differently than the // input CAS. In case the Aggregate needs to send this CAS to the // client, retrieve the client destination by looking up the client endpoint // using input CAS reference id. CASes generated by the CAS multiplier will have // the same Cas Reference id. Endpoint replyToEndpoint = inputCasCacheEntry.getMessageOrigin(); // The message context contains a Cas Multiplier endpoint. Since // we dont want to send a generated CAS back to the CM, override // with an endpoint provided by the client of // this service. Client endpoint is attached to an input Cas cache entry. aMessageContext.getEndpoint().setEndpoint(replyToEndpoint.getEndpoint()); aMessageContext.getEndpoint().setServerURI(replyToEndpoint.getServerURI()); // Before sending a CAS to Cas Multiplier, the aggregate has // saved the CM key in the CAS cache entry. Fetch the key // of the CM so that we can ask the right Shadow Cas Pool for // a new CAS. Every Shadow Cas Pool has a unique id which // corresponds to a Cas Multiplier key. 
newCASProducedBy = inputCasCacheEntry.getCasMultiplierKey(); if ( getController() instanceof AggregateAnalysisEngineController ) { Endpoint casMultiplierEndpoint = ((AggregateAnalysisEngineController)getController()).lookUpEndpoint(newCASProducedBy, false); if ( casMultiplierEndpoint != null ) { // Save the URL of the broker managing the Free Cas Notification queue. // This is needed when we try to establish a connection to the broker. freeCasEndpoint.setServerURI(casMultiplierEndpoint.getServerURI()); } } // increment number of CASes produced from an input CAS // The input CAS (parent) will be held by // the aggregate until all of its subordinate CASes are // fully processed. Only then, the aggregate can return // it back to the client } else if ( getController().isTopLevelComponent() && getController() instanceof AggregateAnalysisEngineController ) { ((AggregateAnalysisEngineController)getController()).addMessageOrigin(casReferenceId, aMessageContext.getEndpoint()); } // To prevent processing multiple messages with the same CasReferenceId, check the CAS cache // to see if the message with a given CasReferenceId is already being processed. It is, the // message contains the same request possibly issued by the caller due to timeout. Also this // mechanism helps with dealing with scenario when this service is not up when the client sends // request. The client can keep re-sending the same request until its timeout thresholds are // exceeded. By that time, there may be multiple messages in this service queue with the same // CasReferenceId. When the service finally comes back up, it will have multiple messages in // its queue possibly from the same client. Only the first message for any given CasReferenceId // should be processed. 
if ( !getController().getInProcessCache().entryExists(casReferenceId) ) { entry = deserializeCASandRegisterWithCache( casReferenceId, freeCasEndpoint, newCASProducedBy, aMessageContext); if ( getController().isStopped() || entry == null || entry.getCas() == null) { if ( entry != null ) { // The Controller is in shutdown state, release the CAS getController().dropCAS( entry.getCasReferenceId(), true); entry = null; } return; } // ***************************************************************** // Process the CAS // ***************************************************************** invokeProcess(entry.getCas(), inputCasReferenceId, casReferenceId, aMessageContext, newCASProducedBy); } else { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithXMI", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_duplicate_request__INFO", new Object[] { casReferenceId}); } } } catch ( Exception e) { e.printStackTrace(); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.WARNING)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.WARNING, getClass().getName(), "handleProcessRequestWithXMI", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_exception__WARNING", e); } ErrorContext errorContext = new ErrorContext(); errorContext.add(AsynchAEMessage.Endpoint, aMessageContext.getEndpoint()); errorContext.add(AsynchAEMessage.Command, AsynchAEMessage.Process); errorContext.add(AsynchAEMessage.CasReference, casReferenceId ); if ( entry != null ) { getController().dropCAS(entry.getCas()); } getController().getErrorHandlerChain().handle(e, errorContext, getController()); } } private void handleProcessRequestWithCASReference(MessageContext aMessageContext) throws AsynchAEException { boolean isNewCAS = false; String newCASProducedBy = null; try { // This is only used when handling CASes produced by CAS Multiplier String inputCasReferenceId = null; CAS cas = null; String 
casReferenceId = getCasReferenceId(aMessageContext); // Check if this Cas has been sent from a Cas Multiplier. If so, its sequence will be > 0 if ( aMessageContext.propertyExists(AsynchAEMessage.CasSequence) ) { isNewCAS = true; Endpoint casMultiplierEndpoint = aMessageContext.getEndpoint(); if ( casMultiplierEndpoint == null ) { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_no_endpoint_for_reply__INFO", new Object[] { casReferenceId }); } return; } // Get the id of the parent Cas inputCasReferenceId = aMessageContext.getMessageStringProperty(AsynchAEMessage.InputCasReference); if ( getController() instanceof AggregateAnalysisEngineController ) { // Save the endpoint of the CM which produced the Cas getController().getInProcessCache().setCasProducer(casReferenceId, casMultiplierEndpoint.getEndpoint()); // Convert the endpoint to a key newCASProducedBy = ((AggregateAnalysisEngineController)getController()).lookUpDelegateKey(casMultiplierEndpoint.getEndpoint()); casMultiplierEndpoint.setIsCasMultiplier(true); // Safety check. The input Cas should not be null here if ( inputCasReferenceId != null ) { try { Endpoint endp = null; // Located the origin of the parent Cas. The produced Cas will inherit the origin from its parent. // Once the origin is identified, save the origin using the produced Cas id as a key. if ( endp == null ) { boolean gotTheEndpoint = false; String parentCasId = inputCasReferenceId; // Loop through the parent tree until an origin is found while( !gotTheEndpoint ) { // Check if the current parent has an associated origin endp = ((AggregateAnalysisEngineController)getController()).getMessageOrigin(parentCasId); // Check if there is an origin. 
If so, we are done if ( endp != null ) { break; } // The current parent has no origin, get its parent and try again CacheEntry entry = getController().getInProcessCache().getCacheEntryForCAS(parentCasId); parentCasId = entry.getInputCasReferenceId(); // Check if we reached the top of the hierarchy tree. If so, we have no origin. This should // never be the case. Every Cas must have an origin if ( parentCasId == null ) { break; } } } // If origin not found log it as this indicates an error if ( endp == null ) { System.out.println("Endpoint Not Found For Cas Id:"+inputCasReferenceId); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_msg_origin_not_found__INFO", new Object[] { getController().getComponentName(), inputCasReferenceId }); } } else { ((AggregateAnalysisEngineController)getController()).addMessageOrigin(casReferenceId, endp); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINEST)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINEST, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_msg_origin_added__FINEST", new Object[] { getController().getComponentName(), casReferenceId, newCASProducedBy }); } } } catch( Exception e) { e.printStackTrace(); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.WARNING)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.WARNING, getClass().getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_exception__WARNING", e); } } } else { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_input_cas_invalid__INFO", new Object[] { 
getController().getComponentName(), newCASProducedBy, casReferenceId }); } } } if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_new_cas__FINE", new Object[] { casReferenceId, newCASProducedBy }); } aMessageContext.getEndpoint().setEndpoint(casMultiplierEndpoint.getEndpoint()); aMessageContext.getEndpoint().setServerURI(casMultiplierEndpoint.getServerURI()); } else { if ( getController() instanceof AggregateAnalysisEngineController ) { ((AggregateAnalysisEngineController)getController()).addMessageOrigin(casReferenceId, aMessageContext.getEndpoint()); } } cas = getController().getInProcessCache().getCasByReference(casReferenceId); long arrivalTime = System.nanoTime(); getController().saveTime(arrivalTime, casReferenceId, getController().getName());//aMessageContext.getEndpointName()); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_analyzing_cas__FINE", new Object[] { casReferenceId}); } // Save Process command in the client endpoint. 
cacheProcessCommandInClientEndpoint(); if ( getController().isStopped() ) { return; } if ( isNewCAS ) { invokeProcess(cas, inputCasReferenceId, casReferenceId, aMessageContext, newCASProducedBy); } else { invokeProcess(cas, casReferenceId, null, aMessageContext, newCASProducedBy); } } catch ( AsynchAEException e) { throw e; } catch ( Exception e) { throw new AsynchAEException(e); } } private void handleProcessRequestWithXCAS(MessageContext aMessageContext) throws AsynchAEException { try { // Get the CAS Reference Id of the input CAS String casReferenceId = getCasReferenceId(aMessageContext); String inputCasReferenceId = casReferenceId; // This is only used when handling CASes produced by CAS Multiplier String newCASProducedBy = null; if ( aMessageContext.propertyExists(AsynchAEMessage.CasSequence) ) { // This CAS came in from the CAS Multiplier. Treat it differently than the // input CAS. First, in case the Aggregate needs to send this CAS to the // client, retrieve the client destination by looking up the client endpoint // using input CAS reference id. CASes generated by the CAS multiplier will have // the same Cas Reference id. Endpoint replyToEndpoint = getController(). getInProcessCache(). getCacheEntryForCAS(casReferenceId).getMessageOrigin(); // if ( getController() instanceof AggregateAnalysisEngineController ) { newCASProducedBy = ((AggregateAnalysisEngineController)getController()).lookUpDelegateKey(replyToEndpoint.getEndpoint()); } // MessageContext contains endpoint set by the CAS Multiplier service. Overwrite // this with the endpoint of the client who sent the input CAS. In case this // aggregate is configured to send new CASes to the client we know where to send them. 
aMessageContext.getEndpoint().setEndpoint(replyToEndpoint.getEndpoint()); aMessageContext.getEndpoint().setServerURI(replyToEndpoint.getServerURI()); inputCasReferenceId = String.valueOf(casReferenceId); // Set this to null so that the new CAS gets its own Cas Reference Id below casReferenceId = null; } long arrivalTime = System.nanoTime(); getController().saveTime(arrivalTime, casReferenceId, getController().getName());//aMessageContext.getEndpointName()); // To prevent processing multiple messages with the same CasReferenceId, check the CAS cache // to see if the message with a given CasReferenceId is already being processed. It is, the // message contains the same request possibly issued by the caller due to timeout. Also this // mechanism helps with dealing with scenario when this service is not up when the client sends // request. The client can keep re-sending the same request until its timeout thresholds are // exceeded. By that time, there may be multiple messages in this service queue with the same // CasReferenceId. When the service finally comes back up, it will have multiple messages in // its queue possibly from the same client. Only the first message for any given CasReferenceId // should be processed. if ( casReferenceId == null || !getController().getInProcessCache().entryExists(casReferenceId) ) { String xmi = aMessageContext.getStringMessage(); // ***************************************************************** // ***** NO XMI In Message. Kick this back to sender with exception // ***************************************************************** if ( xmi == null ) { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithXCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_message_has_no_cargo__INFO", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } getController(). getOutputChannel(). 
sendReply(new InvalidMessageException("No XMI data in message"), casReferenceId, aMessageContext.getEndpoint(),AsynchAEMessage.Process); // Dont process this empty message return; } if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithXCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_request_cas__FINE", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } CAS cas = getController().getCasManagerWrapper().getNewCas(); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithXCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_request_cas_granted__FINE", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } XmiSerializationSharedData deserSharedData = new XmiSerializationSharedData(); // UimaSerializer.deserializeCasFromXmi(xmi, cas, deserSharedData, true, -1); uimaSerializer.deserializeCasFromXmi(xmi, cas, deserSharedData, true, -1); if (casReferenceId == null) { CacheEntry entry = getController().getInProcessCache().register(cas, aMessageContext, deserSharedData); casReferenceId = entry.getCasReferenceId(); } else { if (getController() instanceof PrimitiveAnalysisEngineController) { getController().getInProcessCache().register(cas, aMessageContext, deserSharedData, casReferenceId); } } if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithXCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_deserialized_cas_ready_to_process_FINE", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } cacheProcessCommandInClientEndpoint(); invokeProcess(cas, inputCasReferenceId, casReferenceId, aMessageContext, newCASProducedBy); } else { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { 
UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithXCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_duplicate_request__INFO", new Object[] { casReferenceId}); } } } catch ( AsynchAEException e) { throw e; } catch ( Exception e) { throw new AsynchAEException(e); } } private void cacheProcessCommandInClientEndpoint() { Endpoint clientEndpoint = getController().getClientEndpoint(); if ( clientEndpoint != null ) { clientEndpoint.setCommand(AsynchAEMessage.Process); } } private void handleCollectionProcessCompleteRequest(MessageContext aMessageContext) throws AsynchAEException { Endpoint replyToEndpoint = aMessageContext.getEndpoint(); getController().collectionProcessComplete(replyToEndpoint); } private void handleReleaseCASRequest(MessageContext aMessageContext) throws AsynchAEException { String casReferenceId = aMessageContext.getMessageStringProperty(AsynchAEMessage.CasReference); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleReleaseCASRequest", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_release_cas_req__FINE", new Object[] { getController().getName(), casReferenceId }); } getController().releaseNextCas(casReferenceId); } private void handlePingRequest(MessageContext aMessageContext) { try { getController().getOutputChannel().sendReply(AsynchAEMessage.Ping, aMessageContext.getEndpoint()); } catch ( Exception e) { e.printStackTrace(); } } private void handleStopRequest(MessageContext aMessageContext) { System.out.println("###################Controller::"+getController().getComponentName()+" Received <<<STOP>>> Request"); if ( getController() instanceof PrimitiveAnalysisEngineController ) { try { String casReferenceId = aMessageContext.getMessageStringProperty(AsynchAEMessage.CasReference); ( (PrimitiveAnalysisEngineController)getController()).addAbortedCasReferenceId(casReferenceId); } catch( Exception 
e){} } } /** * Main method called by the predecessor handler. * * */ public void handle(Object anObjectToHandle) //throws AsynchAEException { try { super.validate(anObjectToHandle); MessageContext messageContext = (MessageContext) anObjectToHandle; if ( isHandlerForMessage(messageContext, AsynchAEMessage.Request, AsynchAEMessage.Process) || isHandlerForMessage(messageContext, AsynchAEMessage.Request, AsynchAEMessage.CollectionProcessComplete) || isHandlerForMessage(messageContext, AsynchAEMessage.Request, AsynchAEMessage.ReleaseCAS ) || isHandlerForMessage(messageContext, AsynchAEMessage.Request, AsynchAEMessage.Stop ) ) { int payload = messageContext.getMessageIntProperty(AsynchAEMessage.Payload); int command = messageContext.getMessageIntProperty(AsynchAEMessage.Command); getController().getControllerLatch().waitUntilInitialized(); // If a Process Request, increment number of CASes processed if (messageContext.getMessageIntProperty(AsynchAEMessage.MessageType) == AsynchAEMessage.Request && command == AsynchAEMessage.Process &&!messageContext.propertyExists(AsynchAEMessage.CasSequence)) { // Increment number of CASes processed by this service getController().getServicePerformance().incrementNumberOfCASesProcessed(); } if ( getController().isStopped() ) { return; } if (AsynchAEMessage.CASRefID == payload) { // Fetch id of the CAS from the message. if ( getCasReferenceId(messageContext) == null ) { return; // Invalid message. Nothing to do } handleProcessRequestWithCASReference(messageContext); } else if (AsynchAEMessage.XMIPayload == payload || AsynchAEMessage.BinaryPayload == payload) { // Fetch id of the CAS from the message. if ( getCasReferenceId(messageContext) == null ) { return; // Invalid message. Nothing to do } handleProcessRequestFromRemoteDelegate(messageContext); } else if (AsynchAEMessage.XCASPayload == payload) { // Fetch id of the CAS from the message. if ( getCasReferenceId(messageContext) == null ) { return; // Invalid message. 
Nothing to do } handleProcessRequestWithXCAS(messageContext); } else if ( AsynchAEMessage.None == payload && AsynchAEMessage.CollectionProcessComplete == command) { handleCollectionProcessCompleteRequest(messageContext); } else if ( AsynchAEMessage.None == payload && AsynchAEMessage.ReleaseCAS == command) { handleReleaseCASRequest(messageContext); } else if ( AsynchAEMessage.None == payload && AsynchAEMessage.Stop == command) { handleStopRequest(messageContext); } else if ( AsynchAEMessage.None == payload && AsynchAEMessage.Ping == command) { handlePingRequest(messageContext); } // Handled Request return; } // Not a Request nor Command. Delegate to the next handler in the chain super.delegate(messageContext); } catch( Exception e) { e.printStackTrace(); getController().getErrorHandlerChain().handle(e, HandlerBase.populateErrorContext( (MessageContext)anObjectToHandle ), getController()); } } }
uima-as/uimaj-as-core/src/main/java/org/apache/uima/aae/handler/input/ProcessRequestHandler_impl.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.aae.handler.input; import org.apache.uima.UIMAFramework; import org.apache.uima.aae.UIMAEE_Constants; import org.apache.uima.aae.UimaSerializer; import org.apache.uima.aae.InProcessCache.CacheEntry; import org.apache.uima.aae.controller.AggregateAnalysisEngineController; import org.apache.uima.aae.controller.Endpoint; import org.apache.uima.aae.controller.Endpoint_impl; import org.apache.uima.aae.controller.PrimitiveAnalysisEngineController; import org.apache.uima.aae.controller.LocalCache.CasStateEntry; import org.apache.uima.aae.error.AsynchAEException; import org.apache.uima.aae.error.ErrorContext; import org.apache.uima.aae.error.InvalidMessageException; import org.apache.uima.aae.handler.HandlerBase; import org.apache.uima.aae.jmx.ServicePerformance; import org.apache.uima.aae.message.AsynchAEMessage; import org.apache.uima.aae.message.MessageContext; import org.apache.uima.aae.monitor.Monitor; import org.apache.uima.aae.monitor.statistics.DelegateStats; import org.apache.uima.aae.monitor.statistics.LongNumericStatistic; import org.apache.uima.aae.monitor.statistics.TimerStats; import org.apache.uima.analysis_engine.asb.impl.FlowContainer; import 
org.apache.uima.cas.CAS; import org.apache.uima.cas.Marker; import org.apache.uima.cas.impl.XmiSerializationSharedData; import org.apache.uima.util.Level; public class ProcessRequestHandler_impl extends HandlerBase { private static final Class CLASS_NAME = ProcessRequestHandler_impl.class; private Object mux = new Object(); private UimaSerializer uimaSerializer = new UimaSerializer(); public ProcessRequestHandler_impl(String aName) { super(aName); } private void cacheStats(String aCasReferenceId, long aTimeWaitingForCAS, long aTimeToDeserializeCAS ) throws Exception { CacheEntry entry = getController().getInProcessCache().getCacheEntryForCAS(aCasReferenceId); entry.incrementTimeWaitingForCAS( aTimeWaitingForCAS); entry.incrementTimeToDeserializeCAS(aTimeToDeserializeCAS); } private boolean messageContainsXMI(MessageContext aMessageContext, String casReferenceId) throws Exception { // Fetch serialized CAS from the message String xmi = aMessageContext.getStringMessage(); // ***************************************************************** // ***** NO XMI In Message. Kick this back to sender with exception // ***************************************************************** if ( xmi == null ) { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithXMI", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_message_has_no_cargo__INFO", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } getController(). getOutputChannel(). 
sendReply(new InvalidMessageException("No XMI data in message"), casReferenceId, aMessageContext.getEndpoint(),AsynchAEMessage.Process); // Dont process this empty message return false; } return true; } private synchronized CAS getCAS( boolean fetchCASFromShadowCasPool, String shadowCasPoolKey, String casReceivedFrom ) { CAS cas = null; // If this is a new CAS (generated by a CM), fetch a CAS from a Shadow Cas Pool associated with a CM that // produced the CAS. Each CM will have its own Shadow Cas Pool if ( fetchCASFromShadowCasPool ) { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINEST)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINEST, CLASS_NAME.getName(), "getCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_request_cas_cm__FINE", new Object[] { shadowCasPoolKey }); } // Aggregate time spent waiting for a CAS in the shadow cas pool ((AggregateAnalysisEngineController)getController()).getDelegateServicePerformance(shadowCasPoolKey).beginWaitOnShadowCASPool(); cas = getController().getCasManagerWrapper().getNewCas(shadowCasPoolKey); ((AggregateAnalysisEngineController)getController()).getDelegateServicePerformance(shadowCasPoolKey).endWaitOnShadowCASPool(); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "getCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_request_cas_granted_cm__FINE", new Object[] { shadowCasPoolKey }); } } else { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINEST)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINEST, CLASS_NAME.getName(), "getCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_request_cas__FINE", new Object[] { casReceivedFrom }); } // Aggregate time spent waiting for a CAS in the service cas pool getController().getServicePerformance().beginWaitOnCASPool(); cas = getController().getCasManagerWrapper().getNewCas(); getController().getServicePerformance().endWaitOnCASPool(); if 
(UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "getCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_request_cas_granted__FINE", new Object[] { casReceivedFrom }); } } return cas; } /** * * @param casReferenceId * @param freeCasEndpoint * @param shadowCasPoolKey * @param aMessageContext * @return * @throws Exception */ private CacheEntry deserializeCASandRegisterWithCache( String casReferenceId, Endpoint freeCasEndpoint, String shadowCasPoolKey, MessageContext aMessageContext) throws Exception { long inTime = System.nanoTime(); boolean casRegistered = false; // Time how long we wait on Cas Pool to fetch a new CAS long t1 = getController().getCpuTime(); // ************************************************************************* // Fetch CAS from a Cas Pool. If the CAS came from a Cas Multiplier // fetch the CAS from a shadow CAS pool. Otherwise, fetch the CAS // from the service CAS Pool. // ************************************************************************* Endpoint endpoint = aMessageContext.getEndpoint(); CAS cas = getCAS(aMessageContext.propertyExists(AsynchAEMessage.CasSequence), shadowCasPoolKey,endpoint.getEndpoint()); long timeWaitingForCAS = getController().getCpuTime() - t1; // Check if we are still running if ( getController().isStopped() ) { // The Controller is in shutdown state. 
getController().dropCAS(cas); return null; } // ************************************************************************* // Deserialize CAS from the message // ************************************************************************* t1 = getController().getCpuTime(); String serializationStrategy = endpoint.getSerializer(); XmiSerializationSharedData deserSharedData = null; CacheEntry entry = null; if ( serializationStrategy.equals("xmi")) { // Fetch serialized CAS from the message String xmi = aMessageContext.getStringMessage(); deserSharedData = new XmiSerializationSharedData(); // UimaSerializer.deserializeCasFromXmi(xmi, cas, deserSharedData, true, -1); uimaSerializer.deserializeCasFromXmi(xmi, cas, deserSharedData, true, -1); } else if ( serializationStrategy.equals("binary")) { // ************************************************************************* // Register the CAS with a local cache // ************************************************************************* //CacheEntry entry = getController().getInProcessCache().register(cas, aMessageContext, deserSharedData, casReferenceId); byte[] binarySource = aMessageContext.getByteMessage(); // UimaSerializer.deserializeCasFromBinary(binarySource, cas); uimaSerializer.deserializeCasFromBinary(binarySource, cas); } // ************************************************************************* // Check and set up for Delta CAS reply // ************************************************************************* boolean acceptsDeltaCas = false; Marker marker = null; if (aMessageContext.propertyExists(AsynchAEMessage.AcceptsDeltaCas)) { acceptsDeltaCas = aMessageContext.getMessageBooleanProperty(AsynchAEMessage.AcceptsDeltaCas); if (acceptsDeltaCas ) { marker = cas.createMarker(); } } // ************************************************************************* // Register the CAS with a local cache // ************************************************************************* //CacheEntry entry = 
getController().getInProcessCache().register(cas, aMessageContext, deserSharedData, casReferenceId); entry = getController().getInProcessCache().register(cas, aMessageContext, deserSharedData, casReferenceId, marker, acceptsDeltaCas); long timeToDeserializeCAS = getController().getCpuTime() - t1; getController().incrementDeserializationTime(timeToDeserializeCAS); LongNumericStatistic statistic; if ( (statistic = getController().getMonitor().getLongNumericStatistic("",Monitor.TotalDeserializeTime)) != null ) { statistic.increment(timeToDeserializeCAS); } if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithXMI", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_deserialize_cas_time_FINE", new Object[] { timeToDeserializeCAS / 1000 }); } // Update Stats ServicePerformance casStats = getController().getCasStatistics(casReferenceId); casStats.incrementCasDeserializationTime(timeToDeserializeCAS); if ( getController().isTopLevelComponent() ) { synchronized( mux ) { getController().getServicePerformance().incrementCasDeserializationTime(timeToDeserializeCAS); } } getController().saveTime(inTime, casReferenceId, getController().getName()); if ( getController() instanceof AggregateAnalysisEngineController ) { // If the message came from a Cas Multiplier, associate the input/parent CAS id with this CAS if ( aMessageContext.propertyExists(AsynchAEMessage.CasSequence) ) { // Fetch parent CAS id String inputCasReferenceId = aMessageContext.getMessageStringProperty(AsynchAEMessage.InputCasReference); if ( shadowCasPoolKey != null ) { // Save the key of the Cas Multiplier in the cache. 
It will be now known which Cas Multiplier produced this CAS entry.setCasMultiplierKey(shadowCasPoolKey); } // associate this subordinate CAS with the parent CAS entry.setInputCasReferenceId(inputCasReferenceId); // Save a Cas Multiplier endpoint where a Free CAS notification will be sent entry.setFreeCasEndpoint(freeCasEndpoint); cacheStats( inputCasReferenceId, timeWaitingForCAS, timeToDeserializeCAS); } else { cacheStats( casReferenceId, timeWaitingForCAS, timeToDeserializeCAS); } DelegateStats stats = new DelegateStats(); if ( entry.getStat() == null ) { entry.setStat(stats); // Add entry for self (this aggregate). MessageContext.getEndpointName() // returns the name of the queue receiving the message. stats.put(getController().getServiceEndpointName(), new TimerStats()); } else { if (!stats.containsKey(getController().getServiceEndpointName())) { stats.put(getController().getServiceEndpointName(), new DelegateStats()); } } } else { cacheStats( casReferenceId, timeWaitingForCAS, timeToDeserializeCAS); } if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithXMI", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_deserialized_cas_ready_to_process_FINE", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } cacheProcessCommandInClientEndpoint(); return entry; } private String getCasReferenceId( MessageContext aMessageContext ) throws Exception { if ( !aMessageContext.propertyExists(AsynchAEMessage.CasReference) ) { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_message_has_cas_refid__INFO", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } getController(). getOutputChannel(). 
sendReply(new InvalidMessageException("No Cas Reference Id Received From Delegate In message"), null, aMessageContext.getEndpoint(),AsynchAEMessage.Process); return null; } return aMessageContext.getMessageStringProperty(AsynchAEMessage.CasReference); } /** * Handles process request from a remote client * * @param aMessageContext - contains a message from UIMA-AS Client * @throws AsynchAEException */ private void handleProcessRequestFromRemoteDelegate(MessageContext aMessageContext) throws AsynchAEException { CacheEntry entry = null; String casReferenceId = null; // Check if there is a cargo in the message if ( aMessageContext.getMessageIntProperty(AsynchAEMessage.Payload)== AsynchAEMessage.XMIPayload && aMessageContext.getStringMessage() == null ) { return; // No XMI just return } try { String newCASProducedBy = null; // Get the CAS Reference Id of the input CAS // Fetch id of the CAS from the message. If it doesnt exist the method will create an entry in the log file and return null casReferenceId = getCasReferenceId(aMessageContext); if ( casReferenceId == null ) { return; // Invalid message. Nothing to do } // Initially make both equal String inputCasReferenceId = casReferenceId; // Destination where Free Cas Notification will be sent if the CAS came from a Cas Multiplier Endpoint freeCasEndpoint = null; // CASes generated by a Cas Multiplier will have a CasSequence property set. if ( aMessageContext.propertyExists(AsynchAEMessage.CasSequence) ) { // Fetch an ID of the parent CAS inputCasReferenceId = aMessageContext.getMessageStringProperty(AsynchAEMessage.InputCasReference); // Fetch Cache entry for the parent CAS CacheEntry inputCasCacheEntry = getController().getInProcessCache().getCacheEntryForCAS(inputCasReferenceId); CasStateEntry casStateEntry = null; if ( getController() instanceof AggregateAnalysisEngineController ) { casStateEntry = ((AggregateAnalysisEngineController)getController()). 
getLocalCache().lookupEntry(inputCasReferenceId); casStateEntry.incrementSubordinateCasInPlayCount(); } computeStats(aMessageContext, inputCasReferenceId); // Fetch an endpoint where Free CAS Notification must be sent. // This endpoint is unique per CM instance. Meaning, each // instance of CM will have an endpoint where it expects Free CAS // notifications. freeCasEndpoint = aMessageContext.getEndpoint(); // Clone an endpoint where Free Cas Request will be sent freeCasEndpoint = (Endpoint)((Endpoint_impl)freeCasEndpoint).clone(); // Reset the destination aMessageContext.getEndpoint().setDestination(null); // This CAS came in from a CAS Multiplier. Treat it differently than the // input CAS. In case the Aggregate needs to send this CAS to the // client, retrieve the client destination by looking up the client endpoint // using input CAS reference id. CASes generated by the CAS multiplier will have // the same Cas Reference id. Endpoint replyToEndpoint = inputCasCacheEntry.getMessageOrigin(); // The message context contains a Cas Multiplier endpoint. Since // we dont want to send a generated CAS back to the CM, override // with an endpoint provided by the client of // this service. Client endpoint is attached to an input Cas cache entry. aMessageContext.getEndpoint().setEndpoint(replyToEndpoint.getEndpoint()); aMessageContext.getEndpoint().setServerURI(replyToEndpoint.getServerURI()); // Before sending a CAS to Cas Multiplier, the aggregate has // saved the CM key in the CAS cache entry. Fetch the key // of the CM so that we can ask the right Shadow Cas Pool for // a new CAS. Every Shadow Cas Pool has a unique id which // corresponds to a Cas Multiplier key. 
newCASProducedBy = inputCasCacheEntry.getCasMultiplierKey(); if ( getController() instanceof AggregateAnalysisEngineController ) { Endpoint casMultiplierEndpoint = ((AggregateAnalysisEngineController)getController()).lookUpEndpoint(newCASProducedBy, false); if ( casMultiplierEndpoint != null ) { // Save the URL of the broker managing the Free Cas Notification queue. // This is needed when we try to establish a connection to the broker. freeCasEndpoint.setServerURI(casMultiplierEndpoint.getServerURI()); } } // increment number of CASes produced from an input CAS // The input CAS (parent) will be held by // the aggregate until all of its subordinate CASes are // fully processed. Only then, the aggregate can return // it back to the client } else if ( getController().isTopLevelComponent() && getController() instanceof AggregateAnalysisEngineController ) { ((AggregateAnalysisEngineController)getController()).addMessageOrigin(casReferenceId, aMessageContext.getEndpoint()); } // To prevent processing multiple messages with the same CasReferenceId, check the CAS cache // to see if the message with a given CasReferenceId is already being processed. It is, the // message contains the same request possibly issued by the caller due to timeout. Also this // mechanism helps with dealing with scenario when this service is not up when the client sends // request. The client can keep re-sending the same request until its timeout thresholds are // exceeded. By that time, there may be multiple messages in this service queue with the same // CasReferenceId. When the service finally comes back up, it will have multiple messages in // its queue possibly from the same client. Only the first message for any given CasReferenceId // should be processed. 
if ( !getController().getInProcessCache().entryExists(casReferenceId) ) { entry = deserializeCASandRegisterWithCache( casReferenceId, freeCasEndpoint, newCASProducedBy, aMessageContext); if ( getController().isStopped() || entry == null || entry.getCas() == null) { if ( entry != null ) { // The Controller is in shutdown state, release the CAS getController().dropCAS( entry.getCasReferenceId(), true); entry = null; } return; } // ***************************************************************** // Process the CAS // ***************************************************************** invokeProcess(entry.getCas(), inputCasReferenceId, casReferenceId, aMessageContext, newCASProducedBy); } else { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithXMI", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_duplicate_request__INFO", new Object[] { casReferenceId}); } } } catch ( Exception e) { e.printStackTrace(); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.WARNING)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.WARNING, getClass().getName(), "handleProcessRequestWithXMI", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_exception__WARNING", e); } ErrorContext errorContext = new ErrorContext(); errorContext.add(AsynchAEMessage.Endpoint, aMessageContext.getEndpoint()); errorContext.add(AsynchAEMessage.Command, AsynchAEMessage.Process); errorContext.add(AsynchAEMessage.CasReference, casReferenceId ); if ( entry != null ) { getController().dropCAS(entry.getCas()); } getController().getErrorHandlerChain().handle(e, errorContext, getController()); } } private void handleProcessRequestWithCASReference(MessageContext aMessageContext) throws AsynchAEException { boolean isNewCAS = false; String newCASProducedBy = null; try { // This is only used when handling CASes produced by CAS Multiplier String inputCasReferenceId = null; CAS cas = null; String 
casReferenceId = getCasReferenceId(aMessageContext); // Check if this Cas has been sent from a Cas Multiplier. If so, its sequence will be > 0 if ( aMessageContext.propertyExists(AsynchAEMessage.CasSequence) ) { isNewCAS = true; Endpoint casMultiplierEndpoint = aMessageContext.getEndpoint(); if ( casMultiplierEndpoint == null ) { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_no_endpoint_for_reply__INFO", new Object[] { casReferenceId }); } return; } // Get the id of the parent Cas inputCasReferenceId = aMessageContext.getMessageStringProperty(AsynchAEMessage.InputCasReference); if ( getController() instanceof AggregateAnalysisEngineController ) { // Save the endpoint of the CM which produced the Cas getController().getInProcessCache().setCasProducer(casReferenceId, casMultiplierEndpoint.getEndpoint()); // Convert the endpoint to a key newCASProducedBy = ((AggregateAnalysisEngineController)getController()).lookUpDelegateKey(casMultiplierEndpoint.getEndpoint()); casMultiplierEndpoint.setIsCasMultiplier(true); // Safety check. The input Cas should not be null here if ( inputCasReferenceId != null ) { try { Endpoint endp = null; // Located the origin of the parent Cas. The produced Cas will inherit the origin from its parent. // Once the origin is identified, save the origin using the produced Cas id as a key. if ( endp == null ) { boolean gotTheEndpoint = false; String parentCasId = inputCasReferenceId; // Loop through the parent tree until an origin is found while( !gotTheEndpoint ) { // Check if the current parent has an associated origin endp = ((AggregateAnalysisEngineController)getController()).getMessageOrigin(parentCasId); // Check if there is an origin. 
If so, we are done if ( endp != null ) { break; } // The current parent has no origin, get its parent and try again CacheEntry entry = getController().getInProcessCache().getCacheEntryForCAS(parentCasId); parentCasId = entry.getInputCasReferenceId(); // Check if we reached the top of the hierarchy tree. If so, we have no origin. This should // never be the case. Every Cas must have an origin if ( parentCasId == null ) { break; } } } // If origin not found log it as this indicates an error if ( endp == null ) { System.out.println("Endpoint Not Found For Cas Id:"+inputCasReferenceId); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_msg_origin_not_found__INFO", new Object[] { getController().getComponentName(), inputCasReferenceId }); } } else { ((AggregateAnalysisEngineController)getController()).addMessageOrigin(casReferenceId, endp); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINEST)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINEST, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_msg_origin_added__FINEST", new Object[] { getController().getComponentName(), casReferenceId, newCASProducedBy }); } } } catch( Exception e) { e.printStackTrace(); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.WARNING)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.WARNING, getClass().getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_exception__WARNING", e); } } } else { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_input_cas_invalid__INFO", new Object[] { 
getController().getComponentName(), newCASProducedBy, casReferenceId }); } } } if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_new_cas__FINE", new Object[] { casReferenceId, newCASProducedBy }); } aMessageContext.getEndpoint().setEndpoint(casMultiplierEndpoint.getEndpoint()); aMessageContext.getEndpoint().setServerURI(casMultiplierEndpoint.getServerURI()); } else { if ( getController() instanceof AggregateAnalysisEngineController ) { ((AggregateAnalysisEngineController)getController()).addMessageOrigin(casReferenceId, aMessageContext.getEndpoint()); } } cas = getController().getInProcessCache().getCasByReference(casReferenceId); long arrivalTime = System.nanoTime(); getController().saveTime(arrivalTime, casReferenceId, getController().getName());//aMessageContext.getEndpointName()); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithCASReference", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_analyzing_cas__FINE", new Object[] { casReferenceId}); } // Save Process command in the client endpoint. 
cacheProcessCommandInClientEndpoint(); if ( getController().isStopped() ) { return; } if ( isNewCAS ) { invokeProcess(cas, inputCasReferenceId, casReferenceId, aMessageContext, newCASProducedBy); } else { invokeProcess(cas, casReferenceId, null, aMessageContext, newCASProducedBy); } } catch ( AsynchAEException e) { throw e; } catch ( Exception e) { throw new AsynchAEException(e); } } private void handleProcessRequestWithXCAS(MessageContext aMessageContext) throws AsynchAEException { try { // Get the CAS Reference Id of the input CAS String casReferenceId = getCasReferenceId(aMessageContext); String inputCasReferenceId = casReferenceId; // This is only used when handling CASes produced by CAS Multiplier String newCASProducedBy = null; if ( aMessageContext.propertyExists(AsynchAEMessage.CasSequence) ) { // This CAS came in from the CAS Multiplier. Treat it differently than the // input CAS. First, in case the Aggregate needs to send this CAS to the // client, retrieve the client destination by looking up the client endpoint // using input CAS reference id. CASes generated by the CAS multiplier will have // the same Cas Reference id. Endpoint replyToEndpoint = getController(). getInProcessCache(). getCacheEntryForCAS(casReferenceId).getMessageOrigin(); // if ( getController() instanceof AggregateAnalysisEngineController ) { newCASProducedBy = ((AggregateAnalysisEngineController)getController()).lookUpDelegateKey(replyToEndpoint.getEndpoint()); } // MessageContext contains endpoint set by the CAS Multiplier service. Overwrite // this with the endpoint of the client who sent the input CAS. In case this // aggregate is configured to send new CASes to the client we know where to send them. 
aMessageContext.getEndpoint().setEndpoint(replyToEndpoint.getEndpoint()); aMessageContext.getEndpoint().setServerURI(replyToEndpoint.getServerURI()); inputCasReferenceId = String.valueOf(casReferenceId); // Set this to null so that the new CAS gets its own Cas Reference Id below casReferenceId = null; } long arrivalTime = System.nanoTime(); getController().saveTime(arrivalTime, casReferenceId, getController().getName());//aMessageContext.getEndpointName()); // To prevent processing multiple messages with the same CasReferenceId, check the CAS cache // to see if the message with a given CasReferenceId is already being processed. It is, the // message contains the same request possibly issued by the caller due to timeout. Also this // mechanism helps with dealing with scenario when this service is not up when the client sends // request. The client can keep re-sending the same request until its timeout thresholds are // exceeded. By that time, there may be multiple messages in this service queue with the same // CasReferenceId. When the service finally comes back up, it will have multiple messages in // its queue possibly from the same client. Only the first message for any given CasReferenceId // should be processed. if ( casReferenceId == null || !getController().getInProcessCache().entryExists(casReferenceId) ) { String xmi = aMessageContext.getStringMessage(); // ***************************************************************** // ***** NO XMI In Message. Kick this back to sender with exception // ***************************************************************** if ( xmi == null ) { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithXCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_message_has_no_cargo__INFO", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } getController(). getOutputChannel(). 
sendReply(new InvalidMessageException("No XMI data in message"), casReferenceId, aMessageContext.getEndpoint(),AsynchAEMessage.Process); // Dont process this empty message return; } if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithXCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_request_cas__FINE", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } CAS cas = getController().getCasManagerWrapper().getNewCas(); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithXCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_request_cas_granted__FINE", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } XmiSerializationSharedData deserSharedData = new XmiSerializationSharedData(); // UimaSerializer.deserializeCasFromXmi(xmi, cas, deserSharedData, true, -1); uimaSerializer.deserializeCasFromXmi(xmi, cas, deserSharedData, true, -1); if (casReferenceId == null) { CacheEntry entry = getController().getInProcessCache().register(cas, aMessageContext, deserSharedData); casReferenceId = entry.getCasReferenceId(); } else { if (getController() instanceof PrimitiveAnalysisEngineController) { getController().getInProcessCache().register(cas, aMessageContext, deserSharedData, casReferenceId); } } if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleProcessRequestWithXCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_deserialized_cas_ready_to_process_FINE", new Object[] { aMessageContext.getEndpoint().getEndpoint() }); } cacheProcessCommandInClientEndpoint(); invokeProcess(cas, inputCasReferenceId, casReferenceId, aMessageContext, newCASProducedBy); } else { if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.INFO)) { 
UIMAFramework.getLogger(CLASS_NAME).logrb(Level.INFO, CLASS_NAME.getName(), "handleProcessRequestWithXCAS", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_duplicate_request__INFO", new Object[] { casReferenceId}); } } } catch ( AsynchAEException e) { throw e; } catch ( Exception e) { throw new AsynchAEException(e); } } private void cacheProcessCommandInClientEndpoint() { Endpoint clientEndpoint = getController().getClientEndpoint(); if ( clientEndpoint != null ) { clientEndpoint.setCommand(AsynchAEMessage.Process); } } private void handleCollectionProcessCompleteRequest(MessageContext aMessageContext) throws AsynchAEException { Endpoint replyToEndpoint = aMessageContext.getEndpoint(); getController().collectionProcessComplete(replyToEndpoint); } private void handleReleaseCASRequest(MessageContext aMessageContext) throws AsynchAEException { String casReferenceId = aMessageContext.getMessageStringProperty(AsynchAEMessage.CasReference); if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), "handleReleaseCASRequest", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_release_cas_req__FINE", new Object[] { getController().getName(), casReferenceId }); } getController().releaseNextCas(casReferenceId); } private void handlePingRequest(MessageContext aMessageContext) { try { getController().getOutputChannel().sendReply(AsynchAEMessage.Ping, aMessageContext.getEndpoint()); } catch ( Exception e) { e.printStackTrace(); } } private void handleStopRequest(MessageContext aMessageContext) { System.out.println("###################Controller::"+getController().getComponentName()+" Received <<<STOP>>> Request"); if ( getController() instanceof PrimitiveAnalysisEngineController ) { try { String casReferenceId = aMessageContext.getMessageStringProperty(AsynchAEMessage.CasReference); ( (PrimitiveAnalysisEngineController)getController()).addAbortedCasReferenceId(casReferenceId); } catch( Exception 
e){} } } /** * Main method called by the predecessor handler. * * */ public void handle(Object anObjectToHandle) //throws AsynchAEException { try { super.validate(anObjectToHandle); MessageContext messageContext = (MessageContext) anObjectToHandle; if ( isHandlerForMessage(messageContext, AsynchAEMessage.Request, AsynchAEMessage.Process) || isHandlerForMessage(messageContext, AsynchAEMessage.Request, AsynchAEMessage.CollectionProcessComplete) || isHandlerForMessage(messageContext, AsynchAEMessage.Request, AsynchAEMessage.ReleaseCAS ) || isHandlerForMessage(messageContext, AsynchAEMessage.Request, AsynchAEMessage.Stop ) ) { int payload = messageContext.getMessageIntProperty(AsynchAEMessage.Payload); int command = messageContext.getMessageIntProperty(AsynchAEMessage.Command); getController().getControllerLatch().waitUntilInitialized(); // If a Process Request, increment number of CASes processed if (messageContext.getMessageIntProperty(AsynchAEMessage.MessageType) == AsynchAEMessage.Request && command == AsynchAEMessage.Process &&!messageContext.propertyExists(AsynchAEMessage.CasSequence)) { // Increment number of CASes processed by this service getController().getServicePerformance().incrementNumberOfCASesProcessed(); } if ( getController().isStopped() ) { return; } if (AsynchAEMessage.CASRefID == payload) { // Fetch id of the CAS from the message. if ( getCasReferenceId(messageContext) == null ) { return; // Invalid message. Nothing to do } handleProcessRequestWithCASReference(messageContext); } else if (AsynchAEMessage.XMIPayload == payload || AsynchAEMessage.BinaryPayload == payload) { // Fetch id of the CAS from the message. if ( getCasReferenceId(messageContext) == null ) { return; // Invalid message. Nothing to do } handleProcessRequestFromRemoteDelegate(messageContext); } else if (AsynchAEMessage.XCASPayload == payload) { // Fetch id of the CAS from the message. if ( getCasReferenceId(messageContext) == null ) { return; // Invalid message. 
Nothing to do } handleProcessRequestWithXCAS(messageContext); } else if ( AsynchAEMessage.None == payload && AsynchAEMessage.CollectionProcessComplete == command) { handleCollectionProcessCompleteRequest(messageContext); } else if ( AsynchAEMessage.None == payload && AsynchAEMessage.ReleaseCAS == command) { handleReleaseCASRequest(messageContext); } else if ( AsynchAEMessage.None == payload && AsynchAEMessage.Stop == command) { handleStopRequest(messageContext); } else if ( AsynchAEMessage.None == payload && AsynchAEMessage.Ping == command) { handlePingRequest(messageContext); } // Handled Request return; } // Not a Request nor Command. Delegate to the next handler in the chain super.delegate(messageContext); } catch( Exception e) { e.printStackTrace(); getController().getErrorHandlerChain().handle(e, HandlerBase.populateErrorContext( (MessageContext)anObjectToHandle ), getController()); } } }
UIMA-1270 commit Burn's patch git-svn-id: dd361d0afbe84f3eb97f7061549e905c2c5df34b@737215 13f79535-47bb-0310-9956-ffa450edef68
uima-as/uimaj-as-core/src/main/java/org/apache/uima/aae/handler/input/ProcessRequestHandler_impl.java
UIMA-1270 commit Burn's patch
<ide><path>ima-as/uimaj-as-core/src/main/java/org/apache/uima/aae/handler/input/ProcessRequestHandler_impl.java <ide> if (UIMAFramework.getLogger(CLASS_NAME).isLoggable(Level.FINE)) { <ide> UIMAFramework.getLogger(CLASS_NAME).logrb(Level.FINE, CLASS_NAME.getName(), <ide> "handleProcessRequestWithXMI", UIMAEE_Constants.JMS_LOG_RESOURCE_BUNDLE, "UIMAEE_deserialize_cas_time_FINE", <del> new Object[] { timeToDeserializeCAS / 1000 }); <add> new Object[] { (double)timeToDeserializeCAS / 1000000.0}); <ide> } <ide> <ide> // Update Stats
Java
unlicense
2b542307f6a3ba34baa2e5a825cb25449956f3f1
0
uruba/TruckApp,uruba/ETS2MP-Companion
package cz.uruba.ets2mpcompanion.fragments; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.preference.EditTextPreference; import android.preference.Preference; import android.preference.PreferenceFragment; import android.text.TextUtils; import java.util.Arrays; import cz.uruba.ets2mpcompanion.R; import cz.uruba.ets2mpcompanion.SettingsActivity; import cz.uruba.ets2mpcompanion.preferences.AutoRefreshIntervalPreference; import cz.uruba.ets2mpcompanion.preferences.ColourChooserPreference; import cz.uruba.ets2mpcompanion.preferences.CustomEditTextPreference; import cz.uruba.ets2mpcompanion.preferences.FormattedEditTextPreference; public class SettingsFragment extends PreferenceFragment implements SharedPreferences.OnSharedPreferenceChangeListener { public static final String PREF_CUSTOM_THEME_ENABLED = "preference_custom_theme"; public static final String PREF_THEME_COLOUR = "preference_theme_colour"; public static final String PREF_WIDGET_TOAST_ENABLED = "preference_widget_toast"; public static final String PREF_MEETUP_REMINDERS_DEFAULT_TITLE = "preference_meetup_reminders_default_title"; public static final String PREF_MEETUP_REMINDERS_DEFAULT_DESCRIPTION = "preference_meetup_reminders_default_description"; public static final String PREF_AUTO_REFRESH_INTERVAL = "preference_auto_refresh_interval"; public static final String[] preferencesSummaryUpdatedFor = { PREF_THEME_COLOUR, PREF_MEETUP_REMINDERS_DEFAULT_TITLE, PREF_MEETUP_REMINDERS_DEFAULT_DESCRIPTION, PREF_AUTO_REFRESH_INTERVAL }; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Load the preferences from an XML resource addPreferencesFromResource(R.xml.preferences); } @Override public void onResume() { super.onResume(); getPreferenceManager().getSharedPreferences().registerOnSharedPreferenceChangeListener(this); for (String preferenceKey : preferencesSummaryUpdatedFor) { 
updateSummaryAsCurrentValue(findPreference(preferenceKey)); } } @Override public void onPause() { getPreferenceManager().getSharedPreferences().unregisterOnSharedPreferenceChangeListener(this); super.onPause(); } @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { switch(key) { case PREF_CUSTOM_THEME_ENABLED: case PREF_THEME_COLOUR: SettingsActivity parentActivity = (SettingsActivity) getActivity(); Intent intent = parentActivity.getIntent(); parentActivity.finishWithoutExitAnimation(); startActivity(intent); } if (Arrays.asList(preferencesSummaryUpdatedFor).contains(key)) { updateSummaryAsCurrentValue(findPreference(key)); } } private void updateSummaryAsCurrentValue(Preference preference) { String currentValue; if (preference instanceof EditTextPreference) { currentValue = ((EditTextPreference) preference).getText(); } else if (preference instanceof ColourChooserPreference) { if(TextUtils.isEmpty(((ColourChooserPreference) preference).getValue())) { return; } currentValue = ((ColourChooserPreference) preference).getValueThemeColour(); } else if (preference instanceof FormattedEditTextPreference) { currentValue = ((FormattedEditTextPreference) preference).getText(); } else if (preference instanceof CustomEditTextPreference) { currentValue = ((CustomEditTextPreference) preference).getText(); } else if (preference instanceof AutoRefreshIntervalPreference){ int minutesTotal = ((AutoRefreshIntervalPreference) preference).getIntervalLengthMinutes(); currentValue = minutesTotal > 0 ? String.format(getString(R.string.settings_summary_auto_refresh_interval), minutesTotal / 60, minutesTotal % 60) : getString(R.string.settings_summary_auto_refresh_interval_never); } else { return; } preference.setSummary(currentValue); } }
app/src/main/java/cz/uruba/ets2mpcompanion/fragments/SettingsFragment.java
package cz.uruba.ets2mpcompanion.fragments; import android.content.Intent; import android.content.SharedPreferences; import android.os.Bundle; import android.preference.EditTextPreference; import android.preference.Preference; import android.preference.PreferenceFragment; import android.text.TextUtils; import java.util.Arrays; import cz.uruba.ets2mpcompanion.R; import cz.uruba.ets2mpcompanion.SettingsActivity; import cz.uruba.ets2mpcompanion.preferences.AutoRefreshIntervalPreference; import cz.uruba.ets2mpcompanion.preferences.ColourChooserPreference; import cz.uruba.ets2mpcompanion.preferences.CustomEditTextPreference; import cz.uruba.ets2mpcompanion.preferences.FormattedEditTextPreference; public class SettingsFragment extends PreferenceFragment implements SharedPreferences.OnSharedPreferenceChangeListener { public static final String PREF_CUSTOM_THEME_ENABLED = "preference_custom_theme"; public static final String PREF_THEME_COLOUR = "preference_theme_colour"; public static final String PREF_WIDGET_TOAST_ENABLED = "preference_widget_toast"; public static final String PREF_MEETUP_REMINDERS_DEFAULT_TITLE = "preference_meetup_reminders_default_title"; public static final String PREF_MEETUP_REMINDERS_DEFAULT_DESCRIPTION = "preference_meetup_reminders_default_description"; public static final String PREF_AUTO_REFRESH_SERVER_LIST = "preference_auto_refresh_server_list"; public static final String PREF_AUTO_REFRESH_MEETUP_LIST = "preference_auto_refresh_meetup_list"; public static final String PREF_AUTO_REFRESH_INTERVAL = "preference_auto_refresh_interval"; public static final String[] preferencesSummaryUpdatedFor = { PREF_THEME_COLOUR, PREF_MEETUP_REMINDERS_DEFAULT_TITLE, PREF_MEETUP_REMINDERS_DEFAULT_DESCRIPTION, PREF_AUTO_REFRESH_INTERVAL }; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Load the preferences from an XML resource addPreferencesFromResource(R.xml.preferences); } @Override public void onResume() { 
super.onResume(); getPreferenceManager().getSharedPreferences().registerOnSharedPreferenceChangeListener(this); for (String preferenceKey : preferencesSummaryUpdatedFor) { updateSummaryAsCurrentValue(findPreference(preferenceKey)); } } @Override public void onPause() { getPreferenceManager().getSharedPreferences().unregisterOnSharedPreferenceChangeListener(this); super.onPause(); } @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { switch(key) { case PREF_CUSTOM_THEME_ENABLED: case PREF_THEME_COLOUR: SettingsActivity parentActivity = (SettingsActivity) getActivity(); Intent intent = parentActivity.getIntent(); parentActivity.finishWithoutExitAnimation(); startActivity(intent); } if (Arrays.asList(preferencesSummaryUpdatedFor).contains(key)) { updateSummaryAsCurrentValue(findPreference(key)); } } private void updateSummaryAsCurrentValue(Preference preference) { String currentValue; if (preference instanceof EditTextPreference) { currentValue = ((EditTextPreference) preference).getText(); } else if (preference instanceof ColourChooserPreference) { if(TextUtils.isEmpty(((ColourChooserPreference) preference).getValue())) { return; } currentValue = ((ColourChooserPreference) preference).getValueThemeColour(); } else if (preference instanceof FormattedEditTextPreference) { currentValue = ((FormattedEditTextPreference) preference).getText(); } else if (preference instanceof CustomEditTextPreference) { currentValue = ((CustomEditTextPreference) preference).getText(); } else if (preference instanceof AutoRefreshIntervalPreference){ int minutesTotal = ((AutoRefreshIntervalPreference) preference).getIntervalLengthMinutes(); currentValue = minutesTotal > 0 ? String.format(getString(R.string.settings_summary_auto_refresh_interval), minutesTotal / 60, minutesTotal % 60) : getString(R.string.settings_summary_auto_refresh_interval_never); } else { return; } preference.setSummary(currentValue); } }
– removed unused properties
app/src/main/java/cz/uruba/ets2mpcompanion/fragments/SettingsFragment.java
– removed unused properties
<ide><path>pp/src/main/java/cz/uruba/ets2mpcompanion/fragments/SettingsFragment.java <ide> public static final String PREF_WIDGET_TOAST_ENABLED = "preference_widget_toast"; <ide> public static final String PREF_MEETUP_REMINDERS_DEFAULT_TITLE = "preference_meetup_reminders_default_title"; <ide> public static final String PREF_MEETUP_REMINDERS_DEFAULT_DESCRIPTION = "preference_meetup_reminders_default_description"; <del> public static final String PREF_AUTO_REFRESH_SERVER_LIST = "preference_auto_refresh_server_list"; <del> public static final String PREF_AUTO_REFRESH_MEETUP_LIST = "preference_auto_refresh_meetup_list"; <ide> public static final String PREF_AUTO_REFRESH_INTERVAL = "preference_auto_refresh_interval"; <ide> <ide> public static final String[] preferencesSummaryUpdatedFor =
Java
bsd-2-clause
7b931aca51009a770ffbfbc794cb05a417c3afb9
0
deepinniagafalls/part2,deepinniagafalls/ScrabbleStage3
package code; import java.awt.FileDialog; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import javax.swing.JFrame; @SuppressWarnings("unused") public class SaveGame { private String _fileToWriteTo = "C:\\Users\\Michael\\myfile.txt"; public static void main(String[] args) throws IOException{ SaveGame t = new SaveGame(); t.writeNewFile(); } public SaveGame() throws IOException{ /* JFrame j = new JFrame(); FileDialog chooser = new FileDialog(j,"Save your file",FileDialog.SAVE); //FileDialog chooser = new FileDialog(j,"Save your file",FileDialog.LOAD); chooser.setDirectory("C:\\"); chooser.setFile("*.txt"); chooser.setVisible(true); String filename = chooser.getFile(); String path = chooser.getDirectory(); System.out.println(path); String filetoWrite = path + filename; _fileToWriteTo = filetoWrite; //File file = new File("/users/mkyong/filename.txt"); */ String content = "YOOOOOOOOOOOOOOOOOOOOOOOo"; File file = new File(_fileToWriteTo); FileWriter fw = new FileWriter(file.getAbsoluteFile()); BufferedWriter bw = new BufferedWriter(fw); bw.write(content); bw.close(); System.out.println("Done"); } public void writeNewFile() throws IOException{ } }
part2/src/code/SaveGame.java
package code; import java.awt.FileDialog; import javax.swing.JFrame; @SuppressWarnings("unused") public class SaveGame { public static void main(String[] args){ SaveGame t = new SaveGame(); } public SaveGame(){ JFrame j = new JFrame(); FileDialog chooser = new FileDialog(j); chooser.setDirectory("C:\\"); chooser.setFile("*.txt"); chooser.setVisible(true); String filename = chooser.getFile(); String path = chooser.getDirectory(); System.out.println(path); } }
filewriting
part2/src/code/SaveGame.java
filewriting
<ide><path>art2/src/code/SaveGame.java <ide> package code; <ide> <ide> import java.awt.FileDialog; <del> <add>import java.io.BufferedWriter; <add>import java.io.File; <add>import java.io.FileWriter; <add>import java.io.IOException; <ide> import javax.swing.JFrame; <ide> <ide> <ide> @SuppressWarnings("unused") <ide> public class SaveGame { <ide> <del> public static void main(String[] args){ <add> private String _fileToWriteTo = "C:\\Users\\Michael\\myfile.txt"; <add> <add> public static void main(String[] args) throws IOException{ <ide> SaveGame t = new SaveGame(); <add> t.writeNewFile(); <add> <ide> } <ide> <del> public SaveGame(){ <add> public SaveGame() throws IOException{ <add> /* <ide> JFrame j = new JFrame(); <del> FileDialog chooser = new FileDialog(j); <add> FileDialog chooser = new FileDialog(j,"Save your file",FileDialog.SAVE); <add> //FileDialog chooser = new FileDialog(j,"Save your file",FileDialog.LOAD); <ide> chooser.setDirectory("C:\\"); <ide> chooser.setFile("*.txt"); <ide> chooser.setVisible(true); <ide> String filename = chooser.getFile(); <ide> String path = chooser.getDirectory(); <ide> System.out.println(path); <add> String filetoWrite = path + filename; <add> _fileToWriteTo = filetoWrite; <add> //File file = new File("/users/mkyong/filename.txt"); <add> */ <add> String content = "YOOOOOOOOOOOOOOOOOOOOOOOo"; <add> File file = new File(_fileToWriteTo); <add> FileWriter fw = new FileWriter(file.getAbsoluteFile()); <add> BufferedWriter bw = new BufferedWriter(fw); <add> bw.write(content); <add> bw.close(); <add> System.out.println("Done"); <add> <ide> } <add> <add> public void writeNewFile() throws IOException{ <add> <add> <add> } <add> <ide> }
Java
apache-2.0
cdb9c2af9c3553e4494470d11493dafef6a7eb8f
0
varsis/spatial4j
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.spatial4j.core.shape.graph; import java.util.ArrayList; import java.util.List; import com.spatial4j.core.shape.Shape; import com.spatial4j.core.shape.Vector3D; import com.spatial4j.core.shape.Point; import com.spatial4j.core.math.VectorUtils; /** * S2 Implementation of a Geodesic Line defined by a series of points * Used in Spatial4j for computing intersections between geodesics * * Represents a sequence of zero or more vertices connected by straight * edges (geodesics) \ * * Fast utility in 3D - easier to not convert between for performance reasons */ public class S2Polyline { // Data private List< Vector3D > vertices; /** * Construct an Empty S2 Polyline */ public S2Polyline() {} /** * Construct an S2 Polyline from a series of Vector3D Points */ public S2Polyline( List< Vector3D > vertices ) { init3D(vertices); } /** * Construct an S2 Polyline from a series of Lat/Long Points */ public S2Polyline( List< Point > vertices ) { init2D(vertices); } /** * Initialize a polyline that connects the given vertices. Empty * polylines are allowed. Adjacent vertices should not be identical or antipodal. * All vertices should be of unit length. 
*/ public void init3D( List< Vector3D > vertices ) { } /** * Convenience initialization of a polyline from 2D lat/long points */ public void init2D( List< Point > vertices ) { } ///// Properties of the Polyline ///// /** * Determine if the PolyLine is empty (this is a valid state of the polyline) */ boolean isEmpty() { } /** * Determine if the polyline is valid (return true after construction) */ boolean isValid() { } ///// Geometric Operations on the Polyline //////// /** * Return the index of the next polyline after the interpolatio of the point p. Allows * caller to easily construct a given suffix of the polyline by concatenating p * with the polyline. * P is guaranteed to be different than vertex next so will never get duplicates. * * Polyline must not be empty. * * some more description.... */ Vector3D getSuffix( double fraction, Vector3D next_vertex ) { } /** * Return the lenght of the polyline */ double getLength() { } /** * Return the true center of the polyline multiplied by the length of the polyline. * The result will not be of unit length so normalize it */ Vector3D getCentroid() { } /** * Return the poitn whose distance from vertex 0 is the given fraciton of the polyline's total * length. Fractions less than 0 or greater than 1 are clamped. Return value is unit lenght. * Polyine must not be empty */ Vector3D interpolate( double fraction ) { } /** * Inverse operation of getSuffix/interpolate. given a point on the polyline, return * the ratio of the distance to the point of the beginning of the polyline over * the length of the polyline. Return value x in [0, 1] */ double UnInterpolate(Vector3D point, int next_vertex ) { } /** * Given a point, returns a point on the polyline that is closest to the given point. * See GetSuffix() for the meaning of next vertex which is chosen here w.r.t the projection * point as opposed to the interpolated point. 
*/ Vector3D project( Vector3D point, int next_vertex ) { } /** * Returns true if this polyline intersects the given polyline */ boolean intersects( S2Polyline line ) { } /** * Reverse the order of vertices listed currently in the s2 polyline */ public void reverse() {} /** * Return a subsequence of vertex indices such that * the polyline connecting these indices is never further than the tolerance * from the original polyline. The first and last vertices are always * preserved. * * * straihgt from their code * // Some useful properties of the algorithm: // // - It runs in linear time. // // - The output is always a valid polyline. In particular, adjacent // output vertices are never identical or antipodal. // // - The method is not optimal, but it tends to produce 2-3% fewer // vertices than the Douglas-Peucker algorithm with the same tolerance. // // - The output is *parametrically* equivalent to the original polyline to // within the given tolerance. For example, if a polyline backtracks on // itself and then proceeds onwards, the backtracking will be preserved // (to within the given tolerance). This is different than the // Douglas-Peucker algorithm used in maps/util/geoutil-inl.h, which only // guarantees geometric equivalence. */ void SubsampleVertices( double tolerance, List< Integer > indices ) { } /** * Return true if two polylines have the same number of vertices and corresponding * vertex pairs are separated by no more than max_error * * Max error defualt setting seems to be 1e-15 */ boolean approxEquals( S2Polyline line, double max_erorr ) { } /** * Return true if "covered" is within "max_error of a contiguous subpath of * this polyline over its entire length. Specifically, returns true if this polyline has parameterization a:[0,1] * -> s^2, "covered" has parameterization b:[0,1]->S^2 and there is a non decreasing function f:[0, 1] -> [0,1] * such that the distance(a(f(t)), b(t)) <= max_error for all t. 
*/ boolean nearlyCoversPolyline( S2Polyline covered, double max_error ) { } /// Some Additional Relational Methods ////////// boolean contains( Vector3D point ) { } /** * Exact java .equals method */ @Override public boolean equals( Object o ) { return equals( this, o ); } /** * Definiton of full equality for 2 s2 polylines */ public boolean equals( S2Polyline thiz, Object o ) { } /** * HashCode for the polyline */ @Override public int hashCode() { return hashCode(this); } /** * Definition of hashCode for a polyline */ public int hashCode( S2Polyline line ) { } /** * toString Method for polyline */ @Override public String toString() { return toString(this); } /** * Definition of toString for polyline */ public String toString( S2Polyline thiz ) { } }
src/main/java/com/spatial4j/core/shape/graph/S2Polyline.java
package com.spatial4j.core.shape.graph; /** * Created with IntelliJ IDEA. * User: rfalford12 * Date: 3/1/14 * Time: 2:07 PM * To change this template use File | Settings | File Templates. */ public class S2Polyline { }
Adding stub code for S2Polyline direct port
src/main/java/com/spatial4j/core/shape/graph/S2Polyline.java
Adding stub code for S2Polyline direct port
<ide><path>rc/main/java/com/spatial4j/core/shape/graph/S2Polyline.java <add>/* <add> * Licensed to the Apache Software Foundation (ASF) under one or more <add> * contributor license agreements. See the NOTICE file distributed with <add> * this work for additional information regarding copyright ownership. <add> * The ASF licenses this file to You under the Apache License, Version 2.0 <add> * (the "License"); you may not use this file except in compliance with <add> * the License. You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. <add> */ <add> <ide> package com.spatial4j.core.shape.graph; <ide> <add>import java.util.ArrayList; <add>import java.util.List; <add> <add>import com.spatial4j.core.shape.Shape; <add>import com.spatial4j.core.shape.Vector3D; <add>import com.spatial4j.core.shape.Point; <add> <add>import com.spatial4j.core.math.VectorUtils; <add> <ide> /** <del> * Created with IntelliJ IDEA. <del> * User: rfalford12 <del> * Date: 3/1/14 <del> * Time: 2:07 PM <del> * To change this template use File | Settings | File Templates. 
<add> * S2 Implementation of a Geodesic Line defined by a series of points <add> * Used in Spatial4j for computing intersections between geodesics <add> * <add> * Represents a sequence of zero or more vertices connected by straight <add> * edges (geodesics) \ <add> * <add> * Fast utility in 3D - easier to not convert between for performance reasons <ide> */ <del>public class S2Polyline { <add>public class S2Polyline { <add> <add> // Data <add> private List< Vector3D > vertices; <add> <add> /** <add> * Construct an Empty S2 Polyline <add> */ <add> public S2Polyline() {} <add> <add> /** <add> * Construct an S2 Polyline from a series of Vector3D Points <add> */ <add> public S2Polyline( List< Vector3D > vertices ) { <add> init3D(vertices); <add> } <add> <add> /** <add> * Construct an S2 Polyline from a series of Lat/Long Points <add> */ <add> public S2Polyline( List< Point > vertices ) { <add> init2D(vertices); <add> } <add> <add> /** <add> * Initialize a polyline that connects the given vertices. Empty <add> * polylines are allowed. Adjacent vertices should not be identical or antipodal. <add> * All vertices should be of unit length. <add> */ <add> public void init3D( List< Vector3D > vertices ) { <add> <add> } <add> <add> /** <add> * Convenience initialization of a polyline from 2D lat/long points <add> */ <add> public void init2D( List< Point > vertices ) { <add> <add> } <add> <add> ///// Properties of the Polyline ///// <add> /** <add> * Determine if the PolyLine is empty (this is a valid state of the polyline) <add> */ <add> boolean isEmpty() { <add> <add> } <add> <add> /** <add> * Determine if the polyline is valid (return true after construction) <add> */ <add> boolean isValid() { <add> <add> } <add> <add> ///// Geometric Operations on the Polyline //////// <add> <add> /** <add> * Return the index of the next polyline after the interpolatio of the point p. 
Allows <add> * caller to easily construct a given suffix of the polyline by concatenating p <add> * with the polyline. <add> * P is guaranteed to be different than vertex next so will never get duplicates. <add> * <add> * Polyline must not be empty. <add> * <add> * some more description.... <add> */ <add> Vector3D getSuffix( double fraction, Vector3D next_vertex ) { <add> <add> } <add> <add> /** <add> * Return the lenght of the polyline <add> */ <add> double getLength() { <add> <add> } <add> <add> /** <add> * Return the true center of the polyline multiplied by the length of the polyline. <add> * The result will not be of unit length so normalize it <add> */ <add> Vector3D getCentroid() { <add> <add> } <add> <add> /** <add> * Return the poitn whose distance from vertex 0 is the given fraciton of the polyline's total <add> * length. Fractions less than 0 or greater than 1 are clamped. Return value is unit lenght. <add> * Polyine must not be empty <add> */ <add> Vector3D interpolate( double fraction ) { <add> <add> } <add> <add> /** <add> * Inverse operation of getSuffix/interpolate. given a point on the polyline, return <add> * the ratio of the distance to the point of the beginning of the polyline over <add> * the length of the polyline. Return value x in [0, 1] <add> */ <add> double UnInterpolate(Vector3D point, int next_vertex ) { <add> <add> } <add> <add> /** <add> * Given a point, returns a point on the polyline that is closest to the given point. <add> * See GetSuffix() for the meaning of next vertex which is chosen here w.r.t the projection <add> * point as opposed to the interpolated point. 
<add> */ <add> Vector3D project( Vector3D point, int next_vertex ) { <add> <add> } <add> <add> /** <add> * Returns true if this polyline intersects the given polyline <add> */ <add> boolean intersects( S2Polyline line ) { <add> <add> } <add> <add> /** <add> * Reverse the order of vertices listed currently in the s2 polyline <add> */ <add> public void reverse() {} <add> <add> /** <add> * Return a subsequence of vertex indices such that <add> * the polyline connecting these indices is never further than the tolerance <add> * from the original polyline. The first and last vertices are always <add> * preserved. <add> * <add> * <add> * straihgt from their code <add> * // Some useful properties of the algorithm: <add> // <add> // - It runs in linear time. <add> // <add> // - The output is always a valid polyline. In particular, adjacent <add> // output vertices are never identical or antipodal. <add> // <add> // - The method is not optimal, but it tends to produce 2-3% fewer <add> // vertices than the Douglas-Peucker algorithm with the same tolerance. <add> // <add> // - The output is *parametrically* equivalent to the original polyline to <add> // within the given tolerance. For example, if a polyline backtracks on <add> // itself and then proceeds onwards, the backtracking will be preserved <add> // (to within the given tolerance). This is different than the <add> // Douglas-Peucker algorithm used in maps/util/geoutil-inl.h, which only <add> // guarantees geometric equivalence. 
<add> */ <add> void SubsampleVertices( double tolerance, List< Integer > indices ) { <add> <add> } <add> <add> /** <add> * Return true if two polylines have the same number of vertices and corresponding <add> * vertex pairs are separated by no more than max_error <add> * <add> * Max error defualt setting seems to be 1e-15 <add> */ <add> boolean approxEquals( S2Polyline line, double max_erorr ) { <add> <add> } <add> <add> /** <add> * Return true if "covered" is within "max_error of a contiguous subpath of <add> * this polyline over its entire length. Specifically, returns true if this polyline has parameterization a:[0,1] <add> * -> s^2, "covered" has parameterization b:[0,1]->S^2 and there is a non decreasing function f:[0, 1] -> [0,1] <add> * such that the distance(a(f(t)), b(t)) <= max_error for all t. <add> */ <add> boolean nearlyCoversPolyline( S2Polyline covered, double max_error ) { <add> <add> } <add> <add> /// Some Additional Relational Methods ////////// <add> boolean contains( Vector3D point ) { <add> <add> } <add> <add> /** <add> * Exact java .equals method <add> */ <add> @Override <add> public boolean equals( Object o ) { <add> return equals( this, o ); <add> } <add> <add> /** <add> * Definiton of full equality for 2 s2 polylines <add> */ <add> public boolean equals( S2Polyline thiz, Object o ) { <add> <add> } <add> <add> /** <add> * HashCode for the polyline <add> */ <add> @Override <add> public int hashCode() { <add> return hashCode(this); <add> } <add> <add> /** <add> * Definition of hashCode for a polyline <add> */ <add> public int hashCode( S2Polyline line ) { <add> <add> } <add> <add> /** <add> * toString Method for polyline <add> */ <add> @Override <add> public String toString() { <add> return toString(this); <add> } <add> <add> /** <add> * Definition of toString for polyline <add> */ <add> public String toString( S2Polyline thiz ) { <add> <add> } <add> <ide> }
JavaScript
agpl-3.0
5e71035ead0899eabc3f0056730e837887e4f4b5
0
Stanford-Online/edx-ora2,Stanford-Online/edx-ora2,Stanford-Online/edx-ora2,Stanford-Online/edx-ora2
(function (window) { 'use strict'; /** Interface for TrackChanges assessment view. Args: element (DOM element): The DOM element representing the XBlock. server (OpenAssessment.Server): The interface to the XBlock server. baseView (OpenAssessment.BaseView): Container view. Returns: OpenAssessment.TrackChangesView **/ var OpenAssessment = window.OpenAssessment || {}; function TrackChangesView(element, server, baseView) { this.element = element; this.server = server; this.baseView = baseView; this.content = null; } function clearChangesHandler(e) { var suffix = this.id.split('_').pop(); if (window.confirm('Are you sure you want to clear your changes?')) { e.data.trackers[suffix].rejectAll(); } } TrackChangesView.prototype.enableTrackChanges = function enableTrackChanges() { var tracker; var $ = window.jQuery; var ice = window.ice; var element; var elements = document.querySelectorAll('[id^=track-changes-content_]'); var trackers = []; if (!elements) { return; } for (var index = 0; index < elements.length; index++) { element = elements[index]; tracker = new ice.InlineChangeEditor({ element: element, handleEvents: true, currentUser: { id: 1, name: 'Reviewer' }, plugins: [ { // Track content that is cut and pasted name: 'IceCopyPastePlugin', settings: { // List of tags and attributes to preserve when cleaning a paste preserve: 'p,a[href],span[id,class]em,strong' } } ] }); tracker.startTracking(); trackers.push(tracker); $('#track_changes_clear_button_' + index).click({trackers: trackers}, clearChangesHandler); } }; TrackChangesView.prototype.getEditedContent = function getEditedContent() { var $ = window.jQuery; var changeTracking = $('#openassessment__peer-assessment'); var editedContents = []; var trackChangesContent = $('[id^=track-changes-content_]', changeTracking); if (trackChangesContent.size() > 0) { for (var index = 0; index < trackChangesContent.length; index++) { var editedContentHtml = trackChangesContent.get(index).innerHTML; 
editedContents.push(editedContentHtml); } } return editedContents; }; TrackChangesView.prototype.displayTrackChanges = function displayTrackChanges() { var view = this; var $ = window.jQuery; var editedResponse = $('.submission__answer__display__content.edited.part1', view.element); var gradingTitleHeader = $('#openassessment__grade .submission__answer__display__title'); gradingTitleHeader.wrapInner('<span class="original"></span>'); var peerEditSelect = $('<select><option value="original">Your Unedited Submission</option></select>') .insertBefore(gradingTitleHeader) .wrap("<div class='submission__answer__display__content__peeredit__select'>"); $('<span>Show response with: </span>').insertBefore(peerEditSelect); $(editedResponse).each(function () { var peerNumber = $(this).data('peer-num'); $('<span class="peer' + peerNumber + '">Peer ' + peerNumber + "'s Edits</span>") .appendTo(gradingTitleHeader).hide(); $('<option value="peer' + peerNumber + '">Peer ' + peerNumber + "'s Edits</option>") .appendTo(peerEditSelect); }); $(peerEditSelect).change(function () { var valueSelected = $(':selected', this).val(); $('.submission__answer__display__title span', view.element).hide(); $('.submission__answer__display__title', view.element).children('.' + valueSelected).show(); if (valueSelected === 'original') { $('.submission__answer__display__content.edited', view.element).hide(); $('.submission__answer__display__content.original', view.element).show(); } else { $('.submission__answer__display__content.original', view.element).hide(); $('.submission__answer__display__content.edited', view.element).hide(); $('.submission__answer__display__content.edited.' + valueSelected, view.element).show(); } }); }; OpenAssessment.TrackChangesView = TrackChangesView; window.OpenAssessment = OpenAssessment; }(window));
openassessment/xblock/static/js/src/lms/oa_trackchanges.js
(function (window) { 'use strict'; /** Interface for TrackChanges assessment view. Args: element (DOM element): The DOM element representing the XBlock. server (OpenAssessment.Server): The interface to the XBlock server. baseView (OpenAssessment.BaseView): Container view. Returns: OpenAssessment.TrackChangesView **/ var OpenAssessment = window.OpenAssessment || {}; function TrackChangesView(element, server, baseView) { this.element = element; this.server = server; this.baseView = baseView; this.content = null; } function clearChangesHandler(e) { var suffix = this.id.split('_').pop(); if (confirm('Are you sure you want to clear your changes?')) { e.data.trackers[suffix].rejectAll(); } } TrackChangesView.prototype.enableTrackChanges = function enableTrackChanges() { var tracker; var $ = window.jQuery; var ice = window.ice; var confirm = window.confirm; var element; var elements = document.querySelectorAll('[id^=track-changes-content_]'); var trackers = []; if (!elements) { return; } for (var index = 0; index < elements.length; index++) { element = elements[index]; tracker = new ice.InlineChangeEditor({ element: element, handleEvents: true, currentUser: { id: 1, name: 'Reviewer' }, plugins: [ { // Track content that is cut and pasted name: 'IceCopyPastePlugin', settings: { // List of tags and attributes to preserve when cleaning a paste preserve: 'p,a[href],span[id,class]em,strong' } } ] }); tracker.startTracking(); trackers.push(tracker); $('#track_changes_clear_button_' + index).click({trackers: trackers}, clearChangesHandler); } }; TrackChangesView.prototype.getEditedContent = function getEditedContent() { var $ = window.jQuery; var changeTracking = $('#openassessment__peer-assessment'); var editedContents = []; var trackChangesContent = $('[id^=track-changes-content_]', changeTracking); if (trackChangesContent.size() > 0) { for (var index = 0; index < trackChangesContent.length; index++) { var editedContentHtml = trackChangesContent.get(index).innerHTML; 
editedContents.push(editedContentHtml); } } return editedContents; }; TrackChangesView.prototype.displayTrackChanges = function displayTrackChanges() { var view = this; var $ = window.jQuery; var editedResponse = $('.submission__answer__display__content.edited.part1', view.element); var gradingTitleHeader = $('#openassessment__grade .submission__answer__display__title'); gradingTitleHeader.wrapInner('<span class="original"></span>'); var peerEditSelect = $('<select><option value="original">Your Unedited Submission</option></select>') .insertBefore(gradingTitleHeader) .wrap("<div class='submission__answer__display__content__peeredit__select'>"); $('<span>Show response with: </span>').insertBefore(peerEditSelect); $(editedResponse).each(function () { var peerNumber = $(this).data('peer-num'); $('<span class="peer' + peerNumber + '">Peer ' + peerNumber + "'s Edits</span>") .appendTo(gradingTitleHeader).hide(); $('<option value="peer' + peerNumber + '">Peer ' + peerNumber + "'s Edits</option>") .appendTo(peerEditSelect); }); $(peerEditSelect).change(function () { var valueSelected = $(':selected', this).val(); $('.submission__answer__display__title span', view.element).hide(); $('.submission__answer__display__title', view.element).children('.' + valueSelected).show(); if (valueSelected === 'original') { $('.submission__answer__display__content.edited', view.element).hide(); $('.submission__answer__display__content.original', view.element).show(); } else { $('.submission__answer__display__content.original', view.element).hide(); $('.submission__answer__display__content.edited', view.element).hide(); $('.submission__answer__display__content.edited.' + valueSelected, view.element).show(); } }); }; OpenAssessment.TrackChangesView = TrackChangesView; window.OpenAssessment = OpenAssessment; }(window));
Fix for trackchanges confirm dialog
openassessment/xblock/static/js/src/lms/oa_trackchanges.js
Fix for trackchanges confirm dialog
<ide><path>penassessment/xblock/static/js/src/lms/oa_trackchanges.js <ide> <ide> function clearChangesHandler(e) { <ide> var suffix = this.id.split('_').pop(); <del> if (confirm('Are you sure you want to clear your changes?')) { <add> if (window.confirm('Are you sure you want to clear your changes?')) { <ide> e.data.trackers[suffix].rejectAll(); <ide> } <ide> } <ide> var tracker; <ide> var $ = window.jQuery; <ide> var ice = window.ice; <del> var confirm = window.confirm; <ide> var element; <ide> var elements = document.querySelectorAll('[id^=track-changes-content_]'); <ide> var trackers = [];
Java
apache-2.0
cdf7dabd05a60a8bd953108a27497cb9f9aa5aa2
0
nus-ncl/service-web,nus-ncl/service-web,nus-ncl/service-web,nus-ncl/service-web
package sg.ncl; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import javax.servlet.http.HttpSession; import javax.validation.Valid; import org.apache.tomcat.util.codec.binary.Base64; import org.json.JSONObject; import org.springframework.http.*; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.util.FileCopyUtils; import org.springframework.web.multipart.MultipartFile; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.client.RestTemplate; import org.springframework.web.servlet.mvc.support.RedirectAttributes; import sg.ncl.testbed_interface.*; /** * * Spring Controller * Direct the views to appropriate locations and invoke the respective REST API * @author yeoteye * */ @Controller public class MainController { private final String SESSION_LOGGED_IN_USER_ID = "loggedInUserId"; private final int ERROR_NO_SUCH_USER_ID = 0; private final static Logger LOGGER = Logger.getLogger(MainController.class.getName()); private int CURRENT_LOGGED_IN_USER_ID = ERROR_NO_SUCH_USER_ID; private boolean IS_USER_ADMIN = false; private TeamManager teamManager = TeamManager.getInstance(); private UserManager userManager = UserManager.getInstance(); private ExperimentManager experimentManager = ExperimentManager.getInstance(); private DomainManager domainManager = 
DomainManager.getInstance(); private DatasetManager datasetManager = DatasetManager.getInstance(); private NodeManager nodeManager = NodeManager.getInstance(); private String SCENARIOS_DIR_PATH = "src/main/resources/scenarios"; // private final String USERS_URI = "http://localhost:8080/users/"; // private final String AUTHENTICATION_URI = "http://localhost:8080/authentication"; private final String USERS_URI = "http://localhost:80/users/"; private final String AUTHENTICATION_URI = "http://localhost:80/authentication"; private final String USER_ID = "b0086f54-b1e1-46fe-8d95-d101d7c265ad"; @RequestMapping("/") public String index() { return "index"; } @RequestMapping(value="/login", method=RequestMethod.GET) public String login(Model model) { model.addAttribute("loginForm", new LoginForm()); return "login"; } @RequestMapping(value="/login", method=RequestMethod.POST) public String loginSubmit(@ModelAttribute("loginForm") LoginForm loginForm, Model model, HttpSession session) throws Exception { // following is to test if form fields can be retrieved via user input // pretend as though this is a server side validation try { String inputEmail = loginForm.getLoginEmail(); String inputPwd = loginForm.getLoginPassword(); RestTemplate restTemplate = new RestTemplate(); String plainCreds = inputEmail + ":" + inputPwd; byte[] plainCredsBytes = plainCreds.getBytes(); byte[] base64CredsBytes = Base64.encodeBase64(plainCredsBytes); String base64Creds = new String(base64CredsBytes); HttpHeaders headers = new HttpHeaders(); headers.set("Authorization", "Basic " + base64Creds); HttpEntity<String> request = new HttpEntity<String>("parameters", headers); ResponseEntity responseEntity = restTemplate.exchange(AUTHENTICATION_URI, HttpMethod.POST, request, String.class); // TODO call the proper validation functions if (responseEntity.getBody().toString().equals("[email protected]")) { return "redirect:/dashboard"; } } catch (Exception e) { // TODO should catch credentialsNotFound 
exception or a more elegant way of doing // case1: invalid login loginForm.setErrorMsg("Invalid email/password."); return "login"; } return "login"; /* String inputEmail = loginForm.getLoginEmail(); int userId = userManager.getUserIdByEmail(inputEmail); if (userManager.validateLoginDetails(loginForm.getLoginEmail(), loginForm.getLoginPassword()) == false) { // case1: invalid login loginForm.setErrorMsg("Invalid email/password."); return "login"; } else if (userManager.isEmailVerified(loginForm.getLoginEmail()) == false) { // case2: email address not validated model.addAttribute("emailAddress", loginForm.getLoginEmail()); return "redirect:/email_not_validated"; } else if (teamManager.getApprovedTeams(userId) == 0 && teamManager.getJoinRequestTeamMap2(userId) != null) { // case3 // user is not a team owner nor a team member // user has request to join a team but has not been approved by the team owner return "redirect:/join_application_awaiting_approval"; } else if (teamManager.getApprovedTeams(userId) == 0 && teamManager.getUnApprovedTeams(userId) > 0) { // case4: since it goes through case3, user must be applying for a team // team approval under review // email address is supposed to be valid here return "redirect:/team_application_under_review"; } else { // all validated // user may have no team at this point due to rejected team application or join request // must allow user to login so that user can apply again // set login CURRENT_LOGGED_IN_USER_ID = userManager.getUserIdByEmail(loginForm.getLoginEmail()); IS_USER_ADMIN = userManager.isUserAdmin(CURRENT_LOGGED_IN_USER_ID); session.setAttribute("isUserAdmin", IS_USER_ADMIN); session.setAttribute(SESSION_LOGGED_IN_USER_ID, CURRENT_LOGGED_IN_USER_ID); return "redirect:/dashboard"; } */ } @RequestMapping("/passwordreset") public String passwordreset(Model model) { model.addAttribute("loginForm", new LoginForm()); return "passwordreset"; } @RequestMapping("/dashboard") public String dashboard(Model model) { return 
"dashboard"; } @RequestMapping(value="/logout", method=RequestMethod.GET) public String logout(HttpSession session) { CURRENT_LOGGED_IN_USER_ID = ERROR_NO_SUCH_USER_ID; session.removeAttribute("isUserAdmin"); session.removeAttribute(SESSION_LOGGED_IN_USER_ID); return "redirect:/"; } //--------------------------Sign Up Page-------------------------- @RequestMapping(value="/signup2", method=RequestMethod.GET) public String signup2(Model model) { // TODO get each model data and put into relevant ones model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "signup2"; } @RequestMapping(value="/signup2", method=RequestMethod.POST) public String validateDetails(@ModelAttribute("loginForm") LoginForm loginForm, @ModelAttribute("signUpMergedForm") SignUpMergedForm signUpMergedForm) { // TODO get each model data and put into relevant ones // add to User model User newUser = new User(); newUser.setEmail(signUpMergedForm.getEmail()); newUser.setPassword(signUpMergedForm.getPassword()); newUser.setConfirmPassword(signUpMergedForm.getPassword()); newUser.setRole("normal"); newUser.setEmailVerified(false); newUser.setName(signUpMergedForm.getName()); newUser.setJobTitle(signUpMergedForm.getJobTitle()); newUser.setInstitution(signUpMergedForm.getInstitution()); newUser.setInstitutionAbbreviation(signUpMergedForm.getInstitutionAbbreviation()); newUser.setWebsite(signUpMergedForm.getWebsite()); newUser.setAddress1(signUpMergedForm.getAddress1()); newUser.setAddress2(signUpMergedForm.getAddress2()); newUser.setCountry(signUpMergedForm.getCountry()); newUser.setCity(signUpMergedForm.getCity()); newUser.setProvince(signUpMergedForm.getProvince()); newUser.setPostalCode(signUpMergedForm.getPostalCode()); userManager.addNewUser(newUser); int newGeneratedUserId = newUser.getUserId(); // check if user chose create new team or join existing team by checking team name String createNewTeamName = signUpMergedForm.getTeamName(); 
String joinNewTeamName = signUpMergedForm.getJoinTeamName(); // System.out.println("New team name: " + createNewTeamName); // System.out.println("Join existing team name: " + joinNewTeamName); if (createNewTeamName.isEmpty() == false) { // System.out.println("apply for new team"); // add to team model Team newTeam = new Team(); newTeam.setName(createNewTeamName); newTeam.setDescription(signUpMergedForm.getTeamDescription()); newTeam.setWebsite(signUpMergedForm.getTeamDescription()); newTeam.setOrganizationType(signUpMergedForm.getTeamOrganizationType()); newTeam.setIsPublic(signUpMergedForm.getIsPublic()); newTeam.setTeamOwnerId(newGeneratedUserId); newTeam.setIsApproved(false); teamManager.addNewTeam(newTeam); // redirect to application submitted return "redirect:/team_application_submitted"; } else if (joinNewTeamName.isEmpty() == false) { // System.out.println("join existing new team"); // add user request to join team int teamId = teamManager.getTeamIdByTeamName(joinNewTeamName); teamManager.addJoinRequestTeamMap2(newGeneratedUserId, teamId, userManager.getUserById(newGeneratedUserId)); // redirect to join request submitted return "redirect:/join_application_submitted"; } else { // logic error not suppose to reach here return "redirect:/signup2"; } } //--------------------------Account Settings Page-------------------------- @RequestMapping(value="/account_settings", method=RequestMethod.GET) public String accountDetails(Model model, HttpSession session) throws IOException { // TODO id should be some session variable? 
String userId_uri = USERS_URI + "{id}"; RestTemplate restTemplate = new RestTemplate(); String result = restTemplate.getForObject(userId_uri, String.class, USER_ID); /* User editUser = userManager.getUserById(getSessionIdOfLoggedInUser(session)); model.addAttribute("editUser", editUser); */ User2 user2 = extractUserInfo(result); model.addAttribute("editUser", user2); return "account_settings"; } @RequestMapping(value="/account_settings", method=RequestMethod.POST) public String editAccountDetails(@ModelAttribute("editUser") User2 editUser, final RedirectAttributes redirectAttributes, HttpSession session) { // Need to be this way to "edit" details // If not, the form details will overwrite existing user's details JSONObject object = new JSONObject(); JSONObject userDetails = new JSONObject(); JSONObject address = new JSONObject(); userDetails.put("firstName", editUser.getFirstName()); userDetails.put("lastName", editUser.getLastName()); userDetails.put("email", editUser.getEmail()); userDetails.put("phone", editUser.getPhone()); userDetails.put("address", address); address.put("address1", editUser.getAddress1()); address.put("address2", editUser.getAddress2()); address.put("country", editUser.getCountry()); address.put("region", editUser.getRegion()); address.put("zipCode", editUser.getZipCode()); object.put("userDetails", userDetails); String userId_uri = USERS_URI + USER_ID; RestTemplate restTemplate = new RestTemplate(); HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_JSON); HttpEntity<String> request = new HttpEntity<String>(object.toString(), headers); ResponseEntity responseEntity = restTemplate.exchange(userId_uri, HttpMethod.PUT, request, String.class); /* // TODO for email changes need to resend email confirmation User originalUser = userManager.getUserById(getSessionIdOfLoggedInUser(session)); String editedName = editUser.getName(); String editedPassword = editUser.getPassword(); String editedConfirmPassword = 
editUser.getConfirmPassword(); String editedJobTitle = editUser.getJobTitle(); String editedInstitution = editUser.getInstitution(); String editedInstitutionAbbreviation = editUser.getInstitutionAbbreviation(); String editedWebsite = editUser.getWebsite(); String editedAddress1 = editUser.getAddress1(); String editedAddress2 = editUser.getAddress2(); String editedCountry = editUser.getCountry(); String editedCity = editUser.getCity(); String editedProvince = editUser.getProvince(); String editedPostalCode = editUser.getPostalCode(); if (originalUser.updateName(editedName) == true) { redirectAttributes.addFlashAttribute("editName", "success"); } if (editedPassword.equals(editedConfirmPassword) == false) { redirectAttributes.addFlashAttribute("editPasswordMismatch", "unsuccess"); } else if (originalUser.updatePassword(editedPassword) == true) { redirectAttributes.addFlashAttribute("editPassword", "success"); } else { redirectAttributes.addFlashAttribute("editPassword", "unsuccess"); } if (originalUser.updateJobTitle(editedJobTitle) == true) { redirectAttributes.addFlashAttribute("editJobTitle", "success"); } if (originalUser.updateInstitution(editedInstitution) == true) { redirectAttributes.addFlashAttribute("editInstitution", "success"); } if (originalUser.updateInstitutionAbbreviation(editedInstitutionAbbreviation) == true) { redirectAttributes.addFlashAttribute("editInstitutionAbbreviation", "success"); } if (originalUser.updateWebsite(editedWebsite) == true) { redirectAttributes.addFlashAttribute("editWebsite", "success"); } if (originalUser.updateAddress1(editedAddress1) == true) { redirectAttributes.addFlashAttribute("editAddress1", "success"); } if (originalUser.updateAddress2(editedAddress2) == true) { redirectAttributes.addFlashAttribute("editAddress2", "success"); } if (originalUser.updateCountry(editedCountry) == true) { redirectAttributes.addFlashAttribute("editCountry", "success"); } if (originalUser.updateCity(editedCity) == true) { 
redirectAttributes.addFlashAttribute("editCity", "success"); } if (originalUser.updateProvince(editedProvince) == true) { redirectAttributes.addFlashAttribute("editProvince", "success"); } if (originalUser.updatePostalCode(editedPostalCode) == true) { redirectAttributes.addFlashAttribute("editPostalCode", "success"); } userManager.updateUserDetails(originalUser); return "redirect:/account_settings"; */ return "redirect:/account_settings"; } //--------------------User Side Approve Members Page------------ @RequestMapping("/approve_new_user") public String approveNewUser(Model model, HttpSession session) { HashMap<Integer, Team> rv = new HashMap<Integer, Team>(); rv = teamManager.getTeamMapByTeamOwner(getSessionIdOfLoggedInUser(session)); boolean userHasAnyJoinRequest = hasAnyJoinRequest(rv); model.addAttribute("teamMapOwnedByUser", rv); model.addAttribute("userHasAnyJoinRequest", userHasAnyJoinRequest); return "approve_new_user"; } @RequestMapping("/approve_new_user/accept/{teamId}/{userId}") public String userSideAcceptJoinRequest(@PathVariable Integer teamId, @PathVariable Integer userId) { teamManager.acceptJoinRequest(userId, teamId); return "redirect:/approve_new_user"; } @RequestMapping("/approve_new_user/reject/{teamId}/{userId}") public String userSideRejectJoinRequest(@PathVariable Integer teamId, @PathVariable Integer userId) { teamManager.rejectJoinRequest(userId, teamId); return "redirect:/approve_new_user"; } //--------------------------Teams Page-------------------------- @RequestMapping("/teams") public String teams(Model model, HttpSession session) { int currentLoggedInUserId = getSessionIdOfLoggedInUser(session); model.addAttribute("infoMsg", teamManager.getInfoMsg()); model.addAttribute("currentLoggedInUserId", currentLoggedInUserId); model.addAttribute("teamMap", teamManager.getTeamMap(currentLoggedInUserId)); model.addAttribute("publicTeamMap", teamManager.getPublicTeamMap()); model.addAttribute("invitedToParticipateMap2", 
teamManager.getInvitedToParticipateMap2(currentLoggedInUserId)); model.addAttribute("joinRequestMap2", teamManager.getJoinRequestTeamMap2(currentLoggedInUserId)); // REST Client Code // final String uri = host + "teams/?"; // RestTemplate restTemplate = new RestTemplate(); // TeamsList result = restTemplate.getForObject(uri, TeamsList.class); return "teams"; } @RequestMapping("/accept_participation/{teamId}") public String acceptParticipationRequest(@PathVariable Integer teamId, Model model, HttpSession session) { int currentLoggedInUserId = getSessionIdOfLoggedInUser(session); // get user's participation request list // add this user id to the requested list teamManager.acceptParticipationRequest(currentLoggedInUserId, teamId); // remove participation request since accepted teamManager.removeParticipationRequest(currentLoggedInUserId, teamId); // must get team name String teamName = teamManager.getTeamNameByTeamId(teamId); teamManager.setInfoMsg("You have just joined Team " + teamName + " !"); return "redirect:/teams"; } @RequestMapping("/ignore_participation/{teamId}") public String ignoreParticipationRequest(@PathVariable Integer teamId, Model model, HttpSession session) { // get user's participation request list // remove this user id from the requested list String teamName = teamManager.getTeamNameByTeamId(teamId); teamManager.ignoreParticipationRequest2(getSessionIdOfLoggedInUser(session), teamId); teamManager.setInfoMsg("You have just ignored a team request from Team " + teamName + " !"); return "redirect:/teams"; } @RequestMapping("/withdraw/{teamId}") public String withdrawnJoinRequest(@PathVariable Integer teamId, Model model, HttpSession session) { // get user team request // remove this user id from the user's request list String teamName = teamManager.getTeamNameByTeamId(teamId); teamManager.removeUserJoinRequest2(getSessionIdOfLoggedInUser(session), teamId); teamManager.setInfoMsg("You have withdrawn your join request for Team " + teamName); return 
"redirect:/teams"; } @RequestMapping(value="/teams/invite_members/{teamId}", method=RequestMethod.GET) public String inviteMember(@PathVariable Integer teamId, Model model) { model.addAttribute("teamIdVar", teamId); model.addAttribute("teamPageInviteMemberForm", new TeamPageInviteMemberForm()); return "team_page_invite_members"; } @RequestMapping(value="/teams/invite_members/{teamId}", method=RequestMethod.POST) public String sendInvitation(@PathVariable Integer teamId, @ModelAttribute TeamPageInviteMemberForm teamPageInviteMemberForm,Model model) { int userId = userManager.getUserIdByEmail(teamPageInviteMemberForm.getInviteUserEmail()); teamManager.addInvitedToParticipateMap(userId, teamId); return "redirect:/teams"; } @RequestMapping(value="/teams/members_approval/{teamId}", method=RequestMethod.GET) public String membersApproval(@PathVariable Integer teamId, Model model) { model.addAttribute("team", teamManager.getTeamByTeamId(teamId)); return "team_page_approve_members"; } @RequestMapping("/teams/members_approval/accept/{teamId}/{userId}") public String acceptJoinRequest(@PathVariable Integer teamId, @PathVariable Integer userId) { teamManager.acceptJoinRequest(userId, teamId); return "redirect:/teams/members_approval/{teamId}"; } @RequestMapping("/teams/members_approval/reject/{teamId}/{userId}") public String rejectJoinRequest(@PathVariable Integer teamId, @PathVariable Integer userId) { teamManager.rejectJoinRequest(userId, teamId); return "redirect:/teams/members_approval/{teamId}"; } //--------------------------Team Profile Page-------------------------- @RequestMapping("/team_profile/{teamId}") public String teamProfile(@PathVariable Integer teamId, Model model, HttpSession session) { model.addAttribute("currentLoggedInUserId", getSessionIdOfLoggedInUser(session)); model.addAttribute("team", teamManager.getTeamByTeamId(teamId)); model.addAttribute("membersMap", teamManager.getTeamByTeamId(teamId).getMembersMap()); model.addAttribute("userManager", 
userManager); model.addAttribute("teamExpMap", experimentManager.getTeamExperimentsMap(teamId)); return "team_profile"; } @RequestMapping("/remove_member/{teamId}/{userId}") public String removeMember(@PathVariable Integer teamId, @PathVariable Integer userId, Model model) { // TODO check if user is indeed in the team // TODO what happens to active experiments of the user? // remove member from the team // reduce the team count teamManager.removeMembers(userId, teamId); return "redirect:/team_profile/{teamId}"; } @RequestMapping("/team_profile/{teamId}/start_experiment/{expId}") public String startExperimentFromTeamProfile(@PathVariable Integer teamId, @PathVariable Integer expId, Model model, HttpSession session) { // start experiment // ensure experiment is stopped first before starting experimentManager.startExperiment(getSessionIdOfLoggedInUser(session), expId); return "redirect:/team_profile/{teamId}"; } @RequestMapping("/team_profile/{teamId}/stop_experiment/{expId}") public String stopExperimentFromTeamProfile(@PathVariable Integer teamId, @PathVariable Integer expId, Model model, HttpSession session) { // stop experiment // ensure experiment is in ready mode before stopping experimentManager.stopExperiment(getSessionIdOfLoggedInUser(session), expId); return "redirect:/team_profile/{teamId}"; } @RequestMapping("/team_profile/{teamId}/remove_experiment/{expId}") public String removeExperimentFromTeamProfile(@PathVariable Integer teamId, @PathVariable Integer expId, Model model, HttpSession session) { // remove experiment // TODO check userid is indeed the experiment owner or team owner // ensure experiment is stopped first if (experimentManager.removeExperiment(getSessionIdOfLoggedInUser(session), expId) == true) { // decrease exp count to be display on Teams page teamManager.decrementExperimentCount(teamId); } model.addAttribute("experimentList", experimentManager.getExperimentListByExperimentOwner(getSessionIdOfLoggedInUser(session))); return 
"redirect:/team_profile/{teamId}"; } // tail of the POST handler that begins before this chunk

//-------------------------- Team Profile: invite members --------------------------

/** Shows the invite-member form for the given team, reached from the team profile page. */
@RequestMapping(value="/team_profile/invite_user/{teamId}", method=RequestMethod.GET)
public String inviteUserFromTeamProfile(@PathVariable Integer teamId, Model model) {
    model.addAttribute("teamIdVar", teamId);
    model.addAttribute("teamPageInviteMemberForm", new TeamPageInviteMemberForm());
    return "team_profile_invite_members";
}

/**
 * Records an invitation for the user identified by the submitted email address
 * to join the given team, then returns to the team profile page.
 */
@RequestMapping(value="/team_profile/invite_user/{teamId}", method=RequestMethod.POST)
public String sendInvitationFromTeamProfile(@PathVariable Integer teamId, @ModelAttribute TeamPageInviteMemberForm teamPageInviteMemberForm, Model model) {
    int userId = userManager.getUserIdByEmail(teamPageInviteMemberForm.getInviteUserEmail());
    teamManager.addInvitedToParticipateMap(userId, teamId);
    return "redirect:/team_profile/{teamId}";
}

//-------------------------- Apply for New Team Page --------------------------

/** Shows the "apply for a new team" form. */
@RequestMapping(value="/teams/apply_team", method=RequestMethod.GET)
public String teamPageApplyTeam(Model model) {
    model.addAttribute("teamPageApplyTeamForm", new TeamPageApplyTeamForm());
    return "team_page_apply_team";
}

/**
 * Validates the new-team application; on validation errors the form is
 * re-displayed, otherwise the user is sent to the confirmation page.
 */
@RequestMapping(value="/teams/apply_team", method=RequestMethod.POST)
public String checkApplyTeamInfo(@Valid TeamPageApplyTeamForm teamPageApplyTeamForm, BindingResult bindingResult) {
    if (bindingResult.hasErrors()) {
        return "team_page_apply_team";
    }
    // log data to ensure data has been parsed
    LOGGER.log(Level.INFO, "--------Apply for new team info---------");
    LOGGER.log(Level.INFO, teamPageApplyTeamForm.toString());
    return "redirect:/teams/team_application_submitted";
}

/** Static team-owner policy page. */
@RequestMapping(value="/team_owner_policy", method=RequestMethod.GET)
public String teamOwnerPolicy() {
    return "team_owner_policy";
}

//-------------------------- Join Team Page --------------------------

/** Shows the "join an existing team" form. */
@RequestMapping(value="/teams/join_team", method=RequestMethod.GET)
public String teamPageJoinTeam(Model model) {
    model.addAttribute("teamPageJoinTeamForm", new TeamPageJoinTeamForm());
    return "team_page_join_team";
}

/**
 * Validates the join-team form and registers a join request for the
 * logged-in user, so team members can approve it later.
 */
@RequestMapping(value="/teams/join_team", method=RequestMethod.POST)
public String checkJoinTeamInfo(@Valid TeamPageJoinTeamForm teamPageJoinForm, BindingResult bindingResult, Model model, HttpSession session) {
    if (bindingResult.hasErrors()) {
        return "team_page_join_team";
    }
    // log data to ensure data has been parsed
    LOGGER.log(Level.INFO, "--------Join team---------");
    LOGGER.log(Level.INFO, teamPageJoinForm.toString());
    // TODO(review): ensure user is not already in the team or has a pending application
    User currentUser = userManager.getUserById(getSessionIdOfLoggedInUser(session));
    int teamId = teamManager.getTeamIdByTeamName(teamPageJoinForm.getTeamName());
    teamManager.addJoinRequestTeamMap2(getSessionIdOfLoggedInUser(session), teamId, currentUser);
    return "redirect:/teams/join_application_submitted/" + teamId;
}

//-------------------------- Experiment Page --------------------------

/** Lists experiments owned by the logged-in user. */
@RequestMapping(value="/experiments", method=RequestMethod.GET)
public String experiments(Model model, HttpSession session) {
    model.addAttribute("teamManager", teamManager);
    model.addAttribute("experimentList", experimentManager.getExperimentListByExperimentOwner(getSessionIdOfLoggedInUser(session)));
    return "experiments";
}

/** Shows the create-experiment form with the available scenario files and the user's teams. */
@RequestMapping(value="/experiments/create", method=RequestMethod.GET)
public String createExperiment(Model model, HttpSession session) {
    List<String> scenarioFileNameList = getScenarioFileNameList();
    model.addAttribute("experiment", new Experiment());
    model.addAttribute("scenarioFileNameList", scenarioFileNameList);
    model.addAttribute("teamMap", teamManager.getTeamMap(getSessionIdOfLoggedInUser(session)));
    return "experiment_page_create_experiment";
}

/**
 * Creates an experiment from the submitted form. Stores the uploaded network
 * configuration and the optional dataset under Application.EXP_CONFIG_DIR,
 * prefixed with the uploader's session user id. A failed network-config upload
 * aborts creation; a failed dataset upload only reports a flash message.
 *
 * FIX: output streams are now opened with try-with-resources so they are
 * closed even when the copy fails (the original leaked the stream on exception
 * because stream.close() was unreachable from the catch path).
 */
@RequestMapping(value="/experiments/create", method=RequestMethod.POST)
public String validateExperiment(@ModelAttribute Experiment experiment, Model model, HttpSession session, @RequestParam("networkConfiguration") MultipartFile networkFile, @RequestParam("dataset") MultipartFile dataFile, RedirectAttributes redirectAttributes) {
    // TODO Uploaded function for network configuration and optional dataset
    if (!networkFile.isEmpty()) {
        try {
            String networkFileName = getSessionIdOfLoggedInUser(session) + "-networkconfig-" + networkFile.getOriginalFilename();
            try (BufferedOutputStream stream = new BufferedOutputStream(
                    new FileOutputStream(new File(Application.EXP_CONFIG_DIR + "/" + networkFileName)))) {
                FileCopyUtils.copy(networkFile.getInputStream(), stream);
            }
            redirectAttributes.addFlashAttribute("message", "You successfully uploaded " + networkFile.getOriginalFilename() + "!");
            // remember network file name here
        } catch (Exception e) {
            redirectAttributes.addFlashAttribute("message", "You failed to upload " + networkFile.getOriginalFilename() + " => " + e.getMessage());
            // network configuration is mandatory for this branch: abort creation
            return "redirect:/experiments/create";
        }
    }
    if (!dataFile.isEmpty()) {
        try {
            String dataFileName = getSessionIdOfLoggedInUser(session) + "-data-" + dataFile.getOriginalFilename();
            try (BufferedOutputStream stream = new BufferedOutputStream(
                    new FileOutputStream(new File(Application.EXP_CONFIG_DIR + "/" + dataFileName)))) {
                FileCopyUtils.copy(dataFile.getInputStream(), stream);
            }
            redirectAttributes.addFlashAttribute("message2", "You successfully uploaded " + dataFile.getOriginalFilename() + "!");
            // remember data file name here
        } catch (Exception e) {
            // dataset is optional: report the failure but continue creating the experiment
            redirectAttributes.addFlashAttribute("message2", "You failed to upload " + dataFile.getOriginalFilename() + " => " + e.getMessage());
        }
    }
    // add current experiment to experiment manager
    experimentManager.addExperiment(getSessionIdOfLoggedInUser(session), experiment);
    // increase exp count to be displayed on Teams page
    teamManager.incrementExperimentCount(experiment.getTeamId());
    return "redirect:/experiments";
}

/** Shows the scenario contents of the given experiment. */
@RequestMapping("/experiments/configuration/{expId}")
public String viewExperimentConfiguration(@PathVariable Integer expId, Model model) {
    // retrieve the scenario contents to be displayed
    Experiment currExp = experimentManager.getExperimentByExpId(expId);
    model.addAttribute("scenarioContents", currExp.getScenarioContents());
    return "experiment_scenario_contents";
}

/** Removes an experiment and decrements the owning team's experiment count. */
@RequestMapping("/remove_experiment/{expId}")
public String removeExperiment(@PathVariable Integer expId, Model model, HttpSession session) {
    // TODO check userid is indeed the experiment owner or team owner
    // TODO ensure experiment is stopped first
    int teamId = experimentManager.getExperimentByExpId(expId).getTeamId();
    experimentManager.removeExperiment(getSessionIdOfLoggedInUser(session), expId);
    model.addAttribute("experimentList", experimentManager.getExperimentListByExperimentOwner(getSessionIdOfLoggedInUser(session)));
    // decrease exp count to be displayed on Teams page
    teamManager.decrementExperimentCount(teamId);
    return "redirect:/experiments";
}

/** Starts an experiment owned by the logged-in user. */
@RequestMapping("/start_experiment/{expId}")
public String startExperiment(@PathVariable Integer expId, Model model, HttpSession session) {
    // TODO ensure experiment is stopped first before starting
    experimentManager.startExperiment(getSessionIdOfLoggedInUser(session), expId);
    model.addAttribute("experimentList", experimentManager.getExperimentListByExperimentOwner(getSessionIdOfLoggedInUser(session)));
    return "redirect:/experiments";
}

/** Stops an experiment owned by the logged-in user. */
@RequestMapping("/stop_experiment/{expId}")
public String stopExperiment(@PathVariable Integer expId, Model model, HttpSession session) {
    // TODO ensure experiment is in ready mode before stopping
    experimentManager.stopExperiment(getSessionIdOfLoggedInUser(session), expId);
    model.addAttribute("experimentList", experimentManager.getExperimentListByExperimentOwner(getSessionIdOfLoggedInUser(session)));
    return "redirect:/experiments";
}

//--------------------------------- Dataset Page --------------------------
@RequestMapping("/data") public String data(Model model, HttpSession session) { model.addAttribute("datasetOwnedByUserList", datasetManager.getDatasetContributedByUser(getSessionIdOfLoggedInUser(session))); model.addAttribute("datasetAccessibleByUserList", datasetManager.getDatasetAccessibleByuser(getSessionIdOfLoggedInUser(session))); model.addAttribute("userManager", userManager); return "data"; } @RequestMapping(value="/data/contribute", method=RequestMethod.GET) public String contributeData(Model model) { model.addAttribute("dataset", new Dataset()); File rootFolder = new File(Application.ROOT); List<String> fileNames = Arrays.stream(rootFolder.listFiles()) .map(f -> f.getName()) .collect(Collectors.toList()); model.addAttribute("files", Arrays.stream(rootFolder.listFiles()) .sorted(Comparator.comparingLong(f -> -1 * f.lastModified())) .map(f -> f.getName()) .collect(Collectors.toList()) ); return "contribute_data"; } @RequestMapping(value="/data/contribute", method=RequestMethod.POST) public String validateContributeData(@ModelAttribute("dataset") Dataset dataset, HttpSession session, @RequestParam("file") MultipartFile file, RedirectAttributes redirectAttributes) { // TODO // validation // get file from user upload to server if (!file.isEmpty()) { try { String fileName = getSessionIdOfLoggedInUser(session) + "-" + file.getOriginalFilename(); BufferedOutputStream stream = new BufferedOutputStream( new FileOutputStream(new File(Application.ROOT + "/" + fileName))); FileCopyUtils.copy(file.getInputStream(), stream); stream.close(); redirectAttributes.addFlashAttribute("message", "You successfully uploaded " + file.getOriginalFilename() + "!"); datasetManager.addDataset(getSessionIdOfLoggedInUser(session), dataset, file.getOriginalFilename()); } catch (Exception e) { redirectAttributes.addFlashAttribute("message", "You failed to upload " + file.getOriginalFilename() + " => " + e.getMessage()); } } else { redirectAttributes.addFlashAttribute("message", "You failed 
to upload " + file.getOriginalFilename() + " because the file was empty"); } return "redirect:/data"; } @RequestMapping(value="/data/edit/{datasetId}", method=RequestMethod.GET) public String datasetInfo(@PathVariable Integer datasetId, Model model) { Dataset dataset = datasetManager.getDataset(datasetId); model.addAttribute("editDataset", dataset); return "edit_data"; } @RequestMapping(value="/data/edit/{datasetId}", method=RequestMethod.POST) public String editDatasetInfo(@PathVariable Integer datasetId, @ModelAttribute("editDataset") Dataset dataset, final RedirectAttributes redirectAttributes) { Dataset origDataset = datasetManager.getDataset(datasetId); String editedDatasetName = dataset.getDatasetName(); String editedDatasetDesc = dataset.getDatasetDescription(); String editedDatasetLicense = dataset.getLicense(); String editedDatasetPublic = dataset.getIsPublic(); boolean editedDatasetIsRequiredAuthorization = dataset.getRequireAuthorization(); System.out.println(origDataset.getDatasetId()); System.out.println(dataset.getDatasetId()); if (origDataset.updateName(editedDatasetName) == true) { redirectAttributes.addFlashAttribute("editName", "success"); } if (origDataset.updateDescription(editedDatasetDesc) == true) { redirectAttributes.addFlashAttribute("editDesc", "success"); } if (origDataset.updateLicense(editedDatasetLicense) == true) { redirectAttributes.addFlashAttribute("editLicense", "success"); } if (origDataset.updatePublic(editedDatasetPublic) == true) { redirectAttributes.addFlashAttribute("editPublic", "success"); } if (origDataset.updateAuthorization(editedDatasetIsRequiredAuthorization) == true) { redirectAttributes.addFlashAttribute("editIsRequiredAuthorization", "success"); } datasetManager.updateDatasetDetails(origDataset); return "redirect:/data/edit/{datasetId}"; } @RequestMapping("/data/remove_dataset/{datasetId}") public String removeDataset(@PathVariable Integer datasetId) { datasetManager.removeDataset(datasetId); return 
"redirect:/data"; } @RequestMapping("/data/public") public String openDataset(Model model) { model.addAttribute("publicDataMap", datasetManager.getDatasetMap()); model.addAttribute("userManager", userManager); return "data_public"; } @RequestMapping("/data/public/request_access/{dataOwnerId}") public String requestAccessForDataset(@PathVariable Integer dataOwnerId, Model model) { // TODO // send reuqest to team owner // show feedback to users User rv = userManager.getUserById(dataOwnerId); model.addAttribute("ownerName", rv.getName()); model.addAttribute("ownerEmail", rv.getEmail()); return "data_request_access"; } //---------------------------------Admin--------------------------------- @RequestMapping("/admin") public String admin(Model model) { model.addAttribute("domain", new Domain()); model.addAttribute("domainTable", domainManager.getDomainTable()); model.addAttribute("usersMap", userManager.getUserMap()); model.addAttribute("teamsMap", teamManager.getTeamMap()); model.addAttribute("teamManager", teamManager); model.addAttribute("teamsPendingApprovalMap", teamManager.getTeamsPendingApproval()); model.addAttribute("experimentMap", experimentManager.getExperimentMap2()); model.addAttribute("totalTeamCount", teamManager.getTotalTeamsCount()); model.addAttribute("totalExpCount", experimentManager.getTotalExpCount()); model.addAttribute("totalMemberCount", teamManager.getTotalMembersCount()); model.addAttribute("totalMemberAwaitingApprovalCount", teamManager.getTotalMembersAwaitingApproval()); model.addAttribute("datasetMap", datasetManager.getDatasetMap()); model.addAttribute("userManager", userManager); model.addAttribute("nodeMap", nodeManager.getNodeMap()); return "admin"; } @RequestMapping(value="/admin/domains/add", method=RequestMethod.POST) public String addDomain(@Valid Domain domain, BindingResult bindingResult) { if (bindingResult.hasErrors()) { return "redirect:/admin"; } else { domainManager.addDomains(domain.getDomainName()); } return 
"redirect:/admin"; } @RequestMapping("/admin/domains/remove/{domainKey}") public String removeDomain(@PathVariable String domainKey) { domainManager.removeDomains(domainKey); return "redirect:/admin"; } @RequestMapping("/admin/teams/accept/{teamId}") public String approveTeam(@PathVariable Integer teamId) { // set the approved flag to true teamManager.approveTeamApplication(teamId); return "redirect:/admin"; } @RequestMapping("/admin/teams/reject/{teamId}") public String rejectTeam(@PathVariable Integer teamId) { // need to cleanly remove the team application teamManager.rejectTeamApplication(teamId); return "redirect:/admin"; } @RequestMapping("/admin/users/ban/{userId}") public String banUser(@PathVariable Integer userId) { // TODO // perform ban action here // need to cleanly remove user info from teams, user. etc return "redirect:/admin"; } @RequestMapping("/admin/experiments/remove/{expId}") public String adminRemoveExp(@PathVariable Integer expId) { int teamId = experimentManager.getExperimentByExpId(expId).getTeamId(); experimentManager.adminRemoveExperiment(expId); // decrease exp count to be display on Teams page teamManager.decrementExperimentCount(teamId); return "redirect:/admin"; } @RequestMapping(value="/admin/data/contribute", method=RequestMethod.GET) public String adminContributeDataset(Model model) { model.addAttribute("dataset", new Dataset()); File rootFolder = new File(Application.ROOT); List<String> fileNames = Arrays.stream(rootFolder.listFiles()) .map(f -> f.getName()) .collect(Collectors.toList()); model.addAttribute("files", Arrays.stream(rootFolder.listFiles()) .sorted(Comparator.comparingLong(f -> -1 * f.lastModified())) .map(f -> f.getName()) .collect(Collectors.toList()) ); return "admin_contribute_data"; } @RequestMapping(value="/admin/data/contribute", method=RequestMethod.POST) public String validateAdminContributeDataset(@ModelAttribute("dataset") Dataset dataset, HttpSession session, @RequestParam("file") MultipartFile file, 
RedirectAttributes redirectAttributes) { // TODO // validation // get file from user upload to server if (!file.isEmpty()) { try { String fileName = getSessionIdOfLoggedInUser(session) + "-" + file.getOriginalFilename(); BufferedOutputStream stream = new BufferedOutputStream( new FileOutputStream(new File(Application.ROOT + "/" + fileName))); FileCopyUtils.copy(file.getInputStream(), stream); stream.close(); redirectAttributes.addFlashAttribute("message", "You successfully uploaded " + file.getOriginalFilename() + "!"); datasetManager.addDataset(getSessionIdOfLoggedInUser(session), dataset, file.getOriginalFilename()); } catch (Exception e) { redirectAttributes.addFlashAttribute("message", "You failed to upload " + file.getOriginalFilename() + " => " + e.getMessage()); } } else { redirectAttributes.addFlashAttribute("message", "You failed to upload " + file.getOriginalFilename() + " because the file was empty"); } return "redirect:/admin"; } @RequestMapping("/admin/data/remove/{datasetId}") public String adminRemoveDataset(@PathVariable Integer datasetId) { datasetManager.removeDataset(datasetId); return "redirect:/admin"; } @RequestMapping(value="/admin/node/add", method=RequestMethod.GET) public String adminAddNode(Model model) { model.addAttribute("node", new Node()); return "admin_add_node"; } @RequestMapping(value="/admin/node/add", method=RequestMethod.POST) public String adminAddNode(@ModelAttribute("node") Node node) { // TODO // validate fields, eg should be integer nodeManager.addNode(node); return "redirect:/admin"; } //--------------------------Static pages for teams-------------------------- @RequestMapping("/teams/team_application_submitted") public String teamAppSubmitFromTeamsPage() { return "team_page_application_submitted"; } @RequestMapping("/teams/join_application_submitted/{teamId}") public String teamAppJoinFromTeamsPage(@PathVariable Integer teamId, Model model) { int teamOwnerId = teamManager.getTeamByTeamId(teamId).getTeamOwnerId(); 
model.addAttribute("teamOwner", userManager.getUserById(teamOwnerId)); return "team_page_join_application_submitted"; } //--------------------------Static pages for sign up-------------------------- @RequestMapping("/team_application_submitted") public String teamAppSubmit(Model model) { model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "team_application_submitted"; } @RequestMapping("/join_application_submitted") public String joinTeamAppSubmit(Model model) { model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "join_team_application_submitted"; } @RequestMapping("/email_not_validated") public String emailNotValidated(Model model) { model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "email_not_validated"; } @RequestMapping("/team_application_under_review") public String teamAppUnderReview(Model model) { model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "team_application_under_review"; } @RequestMapping("/join_application_awaiting_approval") public String joinTeamAppAwaitingApproval(Model model) { model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "join_team_application_awaiting_approval"; } //--------------------------Get List of scenarios filenames-------------------------- private List<String> getScenarioFileNameList() { List<String> scenarioFileNameList = new ArrayList<String>(); File[] files = new File(SCENARIOS_DIR_PATH).listFiles(); for (File file : files) { if (file.isFile()) { scenarioFileNameList.add(file.getName()); } } return scenarioFileNameList; } //---Check if user is a team owner and has any join request waiting for approval---- private boolean hasAnyJoinRequest(HashMap<Integer, Team> teamMapOwnedByUser) { 
for (Map.Entry<Integer, Team> entry : teamMapOwnedByUser.entrySet()) { Team currTeam = entry.getValue(); if (currTeam.isUserJoinRequestEmpty() == false) { // at least one team has join user request return true; } } // loop through all teams but never return a single true // therefore, user's controlled teams has no join request return false; } //--------------------------MISC-------------------------- public int getSessionIdOfLoggedInUser(HttpSession session) { return Integer.parseInt(session.getAttribute(SESSION_LOGGED_IN_USER_ID).toString()); } public User2 extractUserInfo(String userJson) { User2 user2 = new User2(); // Gson g = new Gson(); // Map<String, Object> javaRootMapObject = g.fromJson(userJson, Map.class); // // for (Map.Entry<String, Object> entry : javaRootMapObject.entrySet()) // { // String key = entry.getKey(); // Object data = entry.getValue(); // // if (key.equals("id")) { // user2.setId(data.toString()); // } else if (key.equals("userDetails")) { // // // // } // } // System.out.println((Map) javaRootMapObject.get("userDetails")); // user2.setId(javaRootMapObject.get("id").toString()); // user2.setEmail(javaRootMapObject.get("email").toString()); JSONObject object = new JSONObject(userJson); JSONObject userDetails = object.getJSONObject("userDetails"); JSONObject address = userDetails.getJSONObject("address"); user2.setId(object.getString("id")); user2.setFirstName(userDetails.getString("firstName")); user2.setLastName(userDetails.getString("lastName")); user2.setEmail(userDetails.getString("email")); user2.setPhone(userDetails.getString("phone")); user2.setAddress1(address.getString("address1")); user2.setAddress2(address.getString("address2")); user2.setCountry(address.getString("country")); user2.setRegion(address.getString("region")); user2.setZipCode(address.getString("zipCode")); return user2; } }
src/main/java/sg/ncl/MainController.java
package sg.ncl; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import javax.servlet.http.HttpSession; import javax.validation.Valid; import com.fasterxml.jackson.databind.util.JSONPObject; import org.apache.tomcat.util.codec.binary.Base64; import org.json.JSONObject; import org.springframework.http.*; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.util.FileCopyUtils; import org.springframework.web.multipart.MultipartFile; import org.springframework.validation.BindingResult; import org.springframework.web.bind.annotation.ModelAttribute; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.client.RestTemplate; import org.springframework.web.servlet.mvc.support.RedirectAttributes; import sg.ncl.testbed_interface.*; /** * * Spring Controller * Direct the views to appropriate locations and invoke the respective REST API * @author yeoteye * */ @Controller public class MainController { private final String SESSION_LOGGED_IN_USER_ID = "loggedInUserId"; private final int ERROR_NO_SUCH_USER_ID = 0; private final static Logger LOGGER = Logger.getLogger(MainController.class.getName()); private int CURRENT_LOGGED_IN_USER_ID = ERROR_NO_SUCH_USER_ID; private boolean IS_USER_ADMIN = false; private TeamManager teamManager = TeamManager.getInstance(); private UserManager userManager = UserManager.getInstance(); private ExperimentManager experimentManager = 
ExperimentManager.getInstance(); private DomainManager domainManager = DomainManager.getInstance(); private DatasetManager datasetManager = DatasetManager.getInstance(); private NodeManager nodeManager = NodeManager.getInstance(); private String SCENARIOS_DIR_PATH = "src/main/resources/scenarios"; // private final String USERS_URI = "http://localhost:8080/users/"; // private final String AUTHENTICATION_URI = "http://localhost:8080/authentication"; private final String USERS_URI = "http://localhost:80/users/"; private final String AUTHENTICATION_URI = "http://localhost:80/authentication"; private final String USER_ID = "b0086f54-b1e1-46fe-8d95-d101d7c265ad"; @RequestMapping("/") public String index() { return "index"; } @RequestMapping(value="/login", method=RequestMethod.GET) public String login(Model model) { model.addAttribute("loginForm", new LoginForm()); return "login"; } @RequestMapping(value="/login", method=RequestMethod.POST) public String loginSubmit(@ModelAttribute("loginForm") LoginForm loginForm, Model model, HttpSession session) throws Exception { // following is to test if form fields can be retrieved via user input // pretend as though this is a server side validation try { String inputEmail = loginForm.getLoginEmail(); String inputPwd = loginForm.getLoginPassword(); RestTemplate restTemplate = new RestTemplate(); String plainCreds = inputEmail + ":" + inputPwd; byte[] plainCredsBytes = plainCreds.getBytes(); byte[] base64CredsBytes = Base64.encodeBase64(plainCredsBytes); String base64Creds = new String(base64CredsBytes); HttpHeaders headers = new HttpHeaders(); headers.set("Authorization", "Basic " + base64Creds); HttpEntity<String> request = new HttpEntity<String>("parameters", headers); ResponseEntity responseEntity = restTemplate.exchange(AUTHENTICATION_URI, HttpMethod.POST, request, String.class); // TODO call the proper validation functions if (responseEntity.getBody().toString().equals("[email protected]")) { return "redirect:/dashboard"; } } 
catch (Exception e) { // TODO should catch credentialsNotFound exception or a more elegant way of doing // case1: invalid login loginForm.setErrorMsg("Invalid email/password."); return "login"; } return "login"; /* String inputEmail = loginForm.getLoginEmail(); int userId = userManager.getUserIdByEmail(inputEmail); if (userManager.validateLoginDetails(loginForm.getLoginEmail(), loginForm.getLoginPassword()) == false) { // case1: invalid login loginForm.setErrorMsg("Invalid email/password."); return "login"; } else if (userManager.isEmailVerified(loginForm.getLoginEmail()) == false) { // case2: email address not validated model.addAttribute("emailAddress", loginForm.getLoginEmail()); return "redirect:/email_not_validated"; } else if (teamManager.getApprovedTeams(userId) == 0 && teamManager.getJoinRequestTeamMap2(userId) != null) { // case3 // user is not a team owner nor a team member // user has request to join a team but has not been approved by the team owner return "redirect:/join_application_awaiting_approval"; } else if (teamManager.getApprovedTeams(userId) == 0 && teamManager.getUnApprovedTeams(userId) > 0) { // case4: since it goes through case3, user must be applying for a team // team approval under review // email address is supposed to be valid here return "redirect:/team_application_under_review"; } else { // all validated // user may have no team at this point due to rejected team application or join request // must allow user to login so that user can apply again // set login CURRENT_LOGGED_IN_USER_ID = userManager.getUserIdByEmail(loginForm.getLoginEmail()); IS_USER_ADMIN = userManager.isUserAdmin(CURRENT_LOGGED_IN_USER_ID); session.setAttribute("isUserAdmin", IS_USER_ADMIN); session.setAttribute(SESSION_LOGGED_IN_USER_ID, CURRENT_LOGGED_IN_USER_ID); return "redirect:/dashboard"; } */ } @RequestMapping("/passwordreset") public String passwordreset(Model model) { model.addAttribute("loginForm", new LoginForm()); return "passwordreset"; } 
/** Dashboard landing page for a logged-in user. */
@RequestMapping("/dashboard")
public String dashboard(Model model) {
    return "dashboard";
}

/** Clears the login state from the session and returns to the index page. */
@RequestMapping(value="/logout", method=RequestMethod.GET)
public String logout(HttpSession session) {
    CURRENT_LOGGED_IN_USER_ID = ERROR_NO_SUCH_USER_ID;
    session.removeAttribute("isUserAdmin");
    session.removeAttribute(SESSION_LOGGED_IN_USER_ID);
    return "redirect:/";
}

//--------------------------Sign Up Page--------------------------

/** Shows the combined sign-up form. */
@RequestMapping(value="/signup2", method=RequestMethod.GET)
public String signup2(Model model) {
    // TODO get each model data and put into relevant ones
    model.addAttribute("loginForm", new LoginForm());
    model.addAttribute("signUpMergedForm", new SignUpMergedForm());
    return "signup2";
}

/**
 * Processes the sign-up form: creates the user, then either files a new-team
 * application or a join request, depending on which team-name field was filled.
 */
@RequestMapping(value="/signup2", method=RequestMethod.POST)
public String validateDetails(@ModelAttribute("loginForm") LoginForm loginForm, @ModelAttribute("signUpMergedForm") SignUpMergedForm signUpMergedForm) {
    // TODO get each model data and put into relevant ones
    // add to User model
    User newUser = new User();
    newUser.setEmail(signUpMergedForm.getEmail());
    newUser.setPassword(signUpMergedForm.getPassword());
    newUser.setConfirmPassword(signUpMergedForm.getPassword());
    newUser.setRole("normal");
    newUser.setEmailVerified(false);
    newUser.setName(signUpMergedForm.getName());
    newUser.setJobTitle(signUpMergedForm.getJobTitle());
    newUser.setInstitution(signUpMergedForm.getInstitution());
    newUser.setInstitutionAbbreviation(signUpMergedForm.getInstitutionAbbreviation());
    newUser.setWebsite(signUpMergedForm.getWebsite());
    newUser.setAddress1(signUpMergedForm.getAddress1());
    newUser.setAddress2(signUpMergedForm.getAddress2());
    newUser.setCountry(signUpMergedForm.getCountry());
    newUser.setCity(signUpMergedForm.getCity());
    newUser.setProvince(signUpMergedForm.getProvince());
    newUser.setPostalCode(signUpMergedForm.getPostalCode());
    userManager.addNewUser(newUser);
    int newGeneratedUserId = newUser.getUserId();
    // check if user chose create new team or join existing team by checking team name
    String createNewTeamName = signUpMergedForm.getTeamName();
    String joinNewTeamName = signUpMergedForm.getJoinTeamName();
    // System.out.println("New team name: " + createNewTeamName);
    // System.out.println("Join existing team name: " + joinNewTeamName);
    if (createNewTeamName.isEmpty() == false) {
        // System.out.println("apply for new team");
        // add to team model
        Team newTeam = new Team();
        newTeam.setName(createNewTeamName);
        newTeam.setDescription(signUpMergedForm.getTeamDescription());
        // NOTE(review): website is set from getTeamDescription() — looks like a
        // copy-paste bug; should presumably be getTeamWebsite(). Confirm and fix.
        newTeam.setWebsite(signUpMergedForm.getTeamDescription());
        newTeam.setOrganizationType(signUpMergedForm.getTeamOrganizationType());
        newTeam.setIsPublic(signUpMergedForm.getIsPublic());
        newTeam.setTeamOwnerId(newGeneratedUserId);
        newTeam.setIsApproved(false);
        teamManager.addNewTeam(newTeam);
        // redirect to application submitted
        return "redirect:/team_application_submitted";
    } else if (joinNewTeamName.isEmpty() == false) {
        // System.out.println("join existing new team");
        // add user request to join team
        int teamId = teamManager.getTeamIdByTeamName(joinNewTeamName);
        teamManager.addJoinRequestTeamMap2(newGeneratedUserId, teamId, userManager.getUserById(newGeneratedUserId));
        // redirect to join request submitted
        return "redirect:/join_application_submitted";
    } else {
        // logic error not suppose to reach here
        return "redirect:/signup2";
    }
}

//--------------------------Account Settings Page--------------------------

/**
 * Fetches the current user's details from the users REST service and shows
 * the account-settings form.
 */
@RequestMapping(value="/account_settings", method=RequestMethod.GET)
public String accountDetails(Model model, HttpSession session) throws IOException {
    // TODO id should be some session variable?
    String userId_uri = USERS_URI + "{id}";
    RestTemplate restTemplate = new RestTemplate();
    String result = restTemplate.getForObject(userId_uri, String.class, USER_ID);
    /*
    User editUser = userManager.getUserById(getSessionIdOfLoggedInUser(session));
    model.addAttribute("editUser", editUser);
    */
    User2 user2 = extractUserInfo(result);
    model.addAttribute("editUser", user2);
    return "account_settings";
}

/**
 * Pushes edited account details to the users REST service as a JSON payload
 * via HTTP PUT, then redirects back to the settings page.
 */
@RequestMapping(value="/account_settings", method=RequestMethod.POST)
public String editAccountDetails(@ModelAttribute("editUser") User2 editUser, final RedirectAttributes redirectAttributes, HttpSession session) {
    // Need to be this way to "edit" details
    // If not, the form details will overwrite existing user's details
    String firstName = editUser.getFirstName();
    String lastName = editUser.getLastName();
    // Assemble the nested JSON body: { "userDetails": { ..., "address": {...} } }.
    JSONObject object = new JSONObject();
    JSONObject userDetails = new JSONObject();
    JSONObject address = new JSONObject();
    userDetails.put("firstName", firstName);
    userDetails.put("lastName", lastName);
    userDetails.put("email", editUser.getEmail());
    userDetails.put("phone", editUser.getPhone());
    userDetails.put("address", address);
    address.put("address1", editUser.getAddress1());
    address.put("address2", editUser.getAddress2());
    address.put("country", editUser.getCountry());
    address.put("region", editUser.getRegion());
    address.put("zipCode", editUser.getZipCode());
    object.put("userDetails", userDetails);
    String userId_uri = USERS_URI + USER_ID;
    RestTemplate restTemplate = new RestTemplate();
    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);
    HttpEntity<String> request = new HttpEntity<String>(object.toString(), headers);
    // NOTE(review): raw ResponseEntity; result is never checked — errors are silent.
    ResponseEntity responseEntity = restTemplate.exchange(userId_uri, HttpMethod.PUT, request, String.class);
    /*
    // TODO for email changes need to resend email confirmation
    User originalUser = userManager.getUserById(getSessionIdOfLoggedInUser(session));
    String editedName = editUser.getName();
    String editedPassword = editUser.getPassword();
    String editedConfirmPassword = editUser.getConfirmPassword();
    String editedJobTitle = editUser.getJobTitle();
    String editedInstitution = editUser.getInstitution();
    String editedInstitutionAbbreviation = editUser.getInstitutionAbbreviation();
    String editedWebsite = editUser.getWebsite();
    String editedAddress1 = editUser.getAddress1();
    String editedAddress2 = editUser.getAddress2();
    String editedCountry = editUser.getCountry();
    String editedCity = editUser.getCity();
    String editedProvince = editUser.getProvince();
    String editedPostalCode = editUser.getPostalCode();
    if (originalUser.updateName(editedName) == true) {
        redirectAttributes.addFlashAttribute("editName", "success");
    }
    if (editedPassword.equals(editedConfirmPassword) == false) {
        redirectAttributes.addFlashAttribute("editPasswordMismatch", "unsuccess");
    } else if (originalUser.updatePassword(editedPassword) == true) {
        redirectAttributes.addFlashAttribute("editPassword", "success");
    } else {
        redirectAttributes.addFlashAttribute("editPassword", "unsuccess");
    }
    if (originalUser.updateJobTitle(editedJobTitle) == true) {
        redirectAttributes.addFlashAttribute("editJobTitle", "success");
    }
    if (originalUser.updateInstitution(editedInstitution) == true) {
        redirectAttributes.addFlashAttribute("editInstitution", "success");
    }
    if (originalUser.updateInstitutionAbbreviation(editedInstitutionAbbreviation) == true) {
        redirectAttributes.addFlashAttribute("editInstitutionAbbreviation", "success");
    }
    if (originalUser.updateWebsite(editedWebsite) == true) {
        redirectAttributes.addFlashAttribute("editWebsite", "success");
    }
    if (originalUser.updateAddress1(editedAddress1) == true) {
        redirectAttributes.addFlashAttribute("editAddress1", "success");
    }
    if (originalUser.updateAddress2(editedAddress2) == true) {
        redirectAttributes.addFlashAttribute("editAddress2", "success");
    }
    if (originalUser.updateCountry(editedCountry) == true) {
        redirectAttributes.addFlashAttribute("editCountry", "success");
    }
    if (originalUser.updateCity(editedCity) == true) {
        redirectAttributes.addFlashAttribute("editCity", "success");
    }
    if (originalUser.updateProvince(editedProvince) == true) {
        redirectAttributes.addFlashAttribute("editProvince", "success");
    }
    if (originalUser.updatePostalCode(editedPostalCode) == true) {
        redirectAttributes.addFlashAttribute("editPostalCode", "success");
    }
    userManager.updateUserDetails(originalUser);
    return "redirect:/account_settings";
    */
    return "redirect:/account_settings";
}

//--------------------User Side Approve Members Page------------

/** Lists teams owned by the current user together with pending join requests. */
@RequestMapping("/approve_new_user")
public String approveNewUser(Model model, HttpSession session) {
    HashMap<Integer, Team> rv = new HashMap<Integer, Team>();
    rv = teamManager.getTeamMapByTeamOwner(getSessionIdOfLoggedInUser(session));
    boolean userHasAnyJoinRequest = hasAnyJoinRequest(rv);
    model.addAttribute("teamMapOwnedByUser", rv);
    model.addAttribute("userHasAnyJoinRequest", userHasAnyJoinRequest);
    return "approve_new_user";
}

/** Team owner accepts a pending join request. */
@RequestMapping("/approve_new_user/accept/{teamId}/{userId}")
public String userSideAcceptJoinRequest(@PathVariable Integer teamId, @PathVariable Integer userId) {
    teamManager.acceptJoinRequest(userId, teamId);
    return "redirect:/approve_new_user";
}

/** Team owner rejects a pending join request. */
@RequestMapping("/approve_new_user/reject/{teamId}/{userId}")
public String userSideRejectJoinRequest(@PathVariable Integer teamId, @PathVariable Integer userId) {
    teamManager.rejectJoinRequest(userId, teamId);
    return "redirect:/approve_new_user";
}

//--------------------------Teams Page--------------------------

/** Teams overview: own teams, public teams, invitations and join requests. */
@RequestMapping("/teams")
public String teams(Model model, HttpSession session) {
    int currentLoggedInUserId = getSessionIdOfLoggedInUser(session);
    model.addAttribute("infoMsg", teamManager.getInfoMsg());
    model.addAttribute("currentLoggedInUserId", currentLoggedInUserId);
    model.addAttribute("teamMap", teamManager.getTeamMap(currentLoggedInUserId));
    model.addAttribute("publicTeamMap", teamManager.getPublicTeamMap());
    model.addAttribute("invitedToParticipateMap2", teamManager.getInvitedToParticipateMap2(currentLoggedInUserId));
    model.addAttribute("joinRequestMap2", teamManager.getJoinRequestTeamMap2(currentLoggedInUserId));
    // REST Client Code
    // final String uri = host + "teams/?";
    // RestTemplate restTemplate = new RestTemplate();
    // TeamsList result = restTemplate.getForObject(uri, TeamsList.class);
    return "teams";
}

/** Accepts a team-participation invitation for the current user. */
@RequestMapping("/accept_participation/{teamId}")
public String acceptParticipationRequest(@PathVariable Integer teamId, Model model, HttpSession session) {
    int currentLoggedInUserId = getSessionIdOfLoggedInUser(session);
    // get user's participation request list
    // add this user id to the requested list
    teamManager.acceptParticipationRequest(currentLoggedInUserId, teamId);
    // remove participation request since accepted
    teamManager.removeParticipationRequest(currentLoggedInUserId, teamId);
    // must get team name
    String teamName = teamManager.getTeamNameByTeamId(teamId);
    teamManager.setInfoMsg("You have just joined Team " + teamName + " !");
    return "redirect:/teams";
}

/** Ignores (dismisses) a team-participation invitation. */
@RequestMapping("/ignore_participation/{teamId}")
public String ignoreParticipationRequest(@PathVariable Integer teamId, Model model, HttpSession session) {
    // get user's participation request list
    // remove this user id from the requested list
    String teamName = teamManager.getTeamNameByTeamId(teamId);
    teamManager.ignoreParticipationRequest2(getSessionIdOfLoggedInUser(session), teamId);
    teamManager.setInfoMsg("You have just ignored a team request from Team " + teamName + " !");
    return "redirect:/teams";
}

/** Withdraws the current user's pending join request for a team. */
@RequestMapping("/withdraw/{teamId}")
public String withdrawnJoinRequest(@PathVariable Integer teamId, Model model, HttpSession session) {
    // get user team request
    // remove this user id from the user's request list
    String teamName = teamManager.getTeamNameByTeamId(teamId);
    teamManager.removeUserJoinRequest2(getSessionIdOfLoggedInUser(session), teamId);
    teamManager.setInfoMsg("You have withdrawn your join request for Team " + teamName);
    return "redirect:/teams";
}

/** Shows the invite-members form for a team (Teams page entry point). */
@RequestMapping(value="/teams/invite_members/{teamId}", method=RequestMethod.GET)
public String inviteMember(@PathVariable Integer teamId, Model model) {
    model.addAttribute("teamIdVar", teamId);
    model.addAttribute("teamPageInviteMemberForm", new TeamPageInviteMemberForm());
    return "team_page_invite_members";
}

/** Records an invitation for the user identified by the submitted email. */
@RequestMapping(value="/teams/invite_members/{teamId}", method=RequestMethod.POST)
public String sendInvitation(@PathVariable Integer teamId, @ModelAttribute TeamPageInviteMemberForm teamPageInviteMemberForm, Model model) {
    int userId = userManager.getUserIdByEmail(teamPageInviteMemberForm.getInviteUserEmail());
    teamManager.addInvitedToParticipateMap(userId, teamId);
    return "redirect:/teams";
}

/** Shows the member-approval page for a team. */
@RequestMapping(value="/teams/members_approval/{teamId}", method=RequestMethod.GET)
public String membersApproval(@PathVariable Integer teamId, Model model) {
    model.addAttribute("team", teamManager.getTeamByTeamId(teamId));
    return "team_page_approve_members";
}

/** Accepts a join request from the member-approval page. */
@RequestMapping("/teams/members_approval/accept/{teamId}/{userId}")
public String acceptJoinRequest(@PathVariable Integer teamId, @PathVariable Integer userId) {
    teamManager.acceptJoinRequest(userId, teamId);
    return "redirect:/teams/members_approval/{teamId}";
}

/** Rejects a join request from the member-approval page. */
@RequestMapping("/teams/members_approval/reject/{teamId}/{userId}")
public String rejectJoinRequest(@PathVariable Integer teamId, @PathVariable Integer userId) {
    teamManager.rejectJoinRequest(userId, teamId);
    return "redirect:/teams/members_approval/{teamId}";
}

//--------------------------Team Profile Page--------------------------

/** Team profile: members, owner info and the team's experiments. */
@RequestMapping("/team_profile/{teamId}")
public String teamProfile(@PathVariable Integer teamId, Model model, HttpSession session) {
    model.addAttribute("currentLoggedInUserId", getSessionIdOfLoggedInUser(session));
    model.addAttribute("team", teamManager.getTeamByTeamId(teamId));
    model.addAttribute("membersMap", teamManager.getTeamByTeamId(teamId).getMembersMap());
    model.addAttribute("userManager", userManager);
    model.addAttribute("teamExpMap", experimentManager.getTeamExperimentsMap(teamId));
    return "team_profile";
}

/** Removes a member from a team. */
@RequestMapping("/remove_member/{teamId}/{userId}")
public String removeMember(@PathVariable Integer teamId, @PathVariable Integer userId, Model model) {
    // TODO check if user is indeed in the team
    // TODO what happens to active experiments of the user?
    // remove member from the team
    // reduce the team count
    teamManager.removeMembers(userId, teamId);
    return "redirect:/team_profile/{teamId}";
}

/** Starts an experiment from the team-profile page. */
@RequestMapping("/team_profile/{teamId}/start_experiment/{expId}")
public String startExperimentFromTeamProfile(@PathVariable Integer teamId, @PathVariable Integer expId, Model model, HttpSession session) {
    // start experiment
    // ensure experiment is stopped first before starting
    experimentManager.startExperiment(getSessionIdOfLoggedInUser(session), expId);
    return "redirect:/team_profile/{teamId}";
}

/** Stops an experiment from the team-profile page. */
@RequestMapping("/team_profile/{teamId}/stop_experiment/{expId}")
public String stopExperimentFromTeamProfile(@PathVariable Integer teamId, @PathVariable Integer expId, Model model, HttpSession session) {
    // stop experiment
    // ensure experiment is in ready mode before stopping
    experimentManager.stopExperiment(getSessionIdOfLoggedInUser(session), expId);
    return "redirect:/team_profile/{teamId}";
}

/** Removes an experiment from the team-profile page, adjusting the team's count. */
@RequestMapping("/team_profile/{teamId}/remove_experiment/{expId}")
public String removeExperimentFromTeamProfile(@PathVariable Integer teamId, @PathVariable Integer expId, Model model, HttpSession session) {
    // remove experiment
    // TODO check userid is indeed the experiment owner or team owner
    // ensure experiment is stopped first
    if (experimentManager.removeExperiment(getSessionIdOfLoggedInUser(session), expId) == true) {
        // decrease exp count to be display on Teams page
        teamManager.decrementExperimentCount(teamId);
    }
    model.addAttribute("experimentList", experimentManager.getExperimentListByExperimentOwner(getSessionIdOfLoggedInUser(session)));
    return "redirect:/team_profile/{teamId}";
}

/** Shows the invite-members form (team-profile entry point). */
@RequestMapping(value="/team_profile/invite_user/{teamId}", method=RequestMethod.GET)
public String inviteUserFromTeamProfile(@PathVariable Integer teamId, Model model) {
    model.addAttribute("teamIdVar", teamId);
    model.addAttribute("teamPageInviteMemberForm", new TeamPageInviteMemberForm());
    return "team_profile_invite_members";
}

/** Records an invitation submitted from the team-profile page. */
@RequestMapping(value="/team_profile/invite_user/{teamId}", method=RequestMethod.POST)
public String sendInvitationFromTeamProfile(@PathVariable Integer teamId, @ModelAttribute TeamPageInviteMemberForm teamPageInviteMemberForm, Model model) {
    int userId = userManager.getUserIdByEmail(teamPageInviteMemberForm.getInviteUserEmail());
    teamManager.addInvitedToParticipateMap(userId, teamId);
    return "redirect:/team_profile/{teamId}";
}

//--------------------------Apply for New Team Page--------------------------

/** Shows the apply-for-new-team form. */
@RequestMapping(value="/teams/apply_team", method=RequestMethod.GET)
public String teamPageApplyTeam(Model model) {
    model.addAttribute("teamPageApplyTeamForm", new TeamPageApplyTeamForm());
    return "team_page_apply_team";
}

/** Validates the new-team application; on success redirects to confirmation. */
@RequestMapping(value="/teams/apply_team", method=RequestMethod.POST)
public String checkApplyTeamInfo(@Valid TeamPageApplyTeamForm teamPageApplyTeamForm, BindingResult bindingResult) {
    if (bindingResult.hasErrors()) {
        // return "redirect:/teams/apply_team";
        return "team_page_apply_team";
    }
    // log data to ensure data has been parsed
    LOGGER.log(Level.INFO, "--------Apply for new team info---------");
    LOGGER.log(Level.INFO, teamPageApplyTeamForm.toString());
    return "redirect:/teams/team_application_submitted";
}

/** Static team-owner policy page. */
@RequestMapping(value="/team_owner_policy", method=RequestMethod.GET)
public String teamOwnerPolicy() {
    return "team_owner_policy";
}

//--------------------------Join Team Page--------------------------

/** Shows the join-team form. */
@RequestMapping(value="/teams/join_team", method=RequestMethod.GET)
public String teamPageJoinTeam(Model model) {
    model.addAttribute("teamPageJoinTeamForm", new TeamPageJoinTeamForm());
    return "team_page_join_team";
}

/** Validates the join-team form and records the join request. */
@RequestMapping(value="/teams/join_team", method=RequestMethod.POST)
public String checkJoinTeamInfo(@Valid TeamPageJoinTeamForm teamPageJoinForm, BindingResult bindingResult, Model model, HttpSession session) {
    if (bindingResult.hasErrors()) {
        return "team_page_join_team";
    }
    // log data to ensure data has been parsed
    LOGGER.log(Level.INFO, "--------Join team---------");
    LOGGER.log(Level.INFO, teamPageJoinForm.toString());
    // perform join team request here
    // add to user join team list
    // ensure user is not already in the team or have submitted the application
    // add to team join request map also for members approval function
    User currentUser = userManager.getUserById(getSessionIdOfLoggedInUser(session));
    int teamId = teamManager.getTeamIdByTeamName(teamPageJoinForm.getTeamName());
    teamManager.addJoinRequestTeamMap2(getSessionIdOfLoggedInUser(session), teamId, currentUser);
    return "redirect:/teams/join_application_submitted/" + teamId;
}

//--------------------------Experiment Page--------------------------

/** Lists the experiments owned by the current user. */
@RequestMapping(value="/experiments", method=RequestMethod.GET)
public String experiments(Model model, HttpSession session) {
    model.addAttribute("teamManager", teamManager);
    model.addAttribute("experimentList", experimentManager.getExperimentListByExperimentOwner(getSessionIdOfLoggedInUser(session)));
    return "experiments";
}

/** Shows the create-experiment form with available scenarios and the user's teams. */
@RequestMapping(value="/experiments/create", method=RequestMethod.GET)
public String createExperiment(Model model, HttpSession session) {
    List<String> scenarioFileNameList = getScenarioFileNameList();
    model.addAttribute("experiment", new Experiment());
    model.addAttribute("scenarioFileNameList", scenarioFileNameList);
    model.addAttribute("teamMap", teamManager.getTeamMap(getSessionIdOfLoggedInUser(session)));
    return "experiment_page_create_experiment";
}

/**
 * Handles experiment creation: saves the uploaded network-configuration file
 * (required for upload handling) and an optional dataset file, then registers
 * the experiment and bumps the owning team's experiment count.
 *
 * NOTE(review): stream.close() is not in a finally/try-with-resources block,
 * so the output stream leaks if copy() throws.
 */
@RequestMapping(value="/experiments/create", method=RequestMethod.POST)
public String validateExperiment(@ModelAttribute Experiment experiment, Model model, HttpSession session, @RequestParam("networkConfiguration") MultipartFile networkFile, @RequestParam("dataset") MultipartFile dataFile, RedirectAttributes redirectAttributes) {
    // TODO Uploaded function for network configuration and optional dataset
    if (!networkFile.isEmpty()) {
        try {
            String networkFileName = getSessionIdOfLoggedInUser(session) + "-networkconfig-" + networkFile.getOriginalFilename();
            BufferedOutputStream stream = new BufferedOutputStream(
                    new FileOutputStream(new File(Application.EXP_CONFIG_DIR + "/" + networkFileName)));
            FileCopyUtils.copy(networkFile.getInputStream(), stream);
            stream.close();
            redirectAttributes.addFlashAttribute("message", "You successfully uploaded " + networkFile.getOriginalFilename() + "!");
            // remember network file name here
        } catch (Exception e) {
            redirectAttributes.addFlashAttribute("message", "You failed to upload " + networkFile.getOriginalFilename() + " => " + e.getMessage());
            return "redirect:/experiments/create";
        }
    }
    if (!dataFile.isEmpty()) {
        try {
            String dataFileName = getSessionIdOfLoggedInUser(session) + "-data-" + dataFile.getOriginalFilename();
            BufferedOutputStream stream = new BufferedOutputStream(
                    new FileOutputStream(new File(Application.EXP_CONFIG_DIR + "/" + dataFileName)));
            FileCopyUtils.copy(dataFile.getInputStream(), stream);
            stream.close();
            redirectAttributes.addFlashAttribute("message2", "You successfully uploaded " + dataFile.getOriginalFilename() + "!");
            // remember data file name here
        } catch (Exception e) {
            redirectAttributes.addFlashAttribute("message2", "You failed to upload " + dataFile.getOriginalFilename() + " => " + e.getMessage());
        }
    }
    // add current experiment to experiment manager
    experimentManager.addExperiment(getSessionIdOfLoggedInUser(session), experiment);
    // increase exp count to be display on Teams page
    teamManager.incrementExperimentCount(experiment.getTeamId());
    return "redirect:/experiments";
}

/** Displays the scenario contents of an experiment. */
@RequestMapping("/experiments/configuration/{expId}")
public String viewExperimentConfiguration(@PathVariable Integer expId, Model model) {
    // get experiment from expid
    // retrieve the scenario contents to be displayed
    Experiment currExp = experimentManager.getExperimentByExpId(expId);
    model.addAttribute("scenarioContents", currExp.getScenarioContents());
    return "experiment_scenario_contents";
}

/** Removes an experiment and decrements the owning team's experiment count. */
@RequestMapping("/remove_experiment/{expId}")
public String removeExperiment(@PathVariable Integer expId, Model model, HttpSession session) {
    // remove experiment
    // TODO check userid is indeed the experiment owner or team owner
    // ensure experiment is stopped first
    int teamId = experimentManager.getExperimentByExpId(expId).getTeamId();
    experimentManager.removeExperiment(getSessionIdOfLoggedInUser(session), expId);
    model.addAttribute("experimentList", experimentManager.getExperimentListByExperimentOwner(getSessionIdOfLoggedInUser(session)));
    // decrease exp count to be display on Teams page
    teamManager.decrementExperimentCount(teamId);
    return "redirect:/experiments";
}

/** Starts an experiment from the experiments page. */
@RequestMapping("/start_experiment/{expId}")
public String startExperiment(@PathVariable Integer expId, Model model, HttpSession session) {
    // start experiment
    // ensure experiment is stopped first before starting
    experimentManager.startExperiment(getSessionIdOfLoggedInUser(session), expId);
    model.addAttribute("experimentList", experimentManager.getExperimentListByExperimentOwner(getSessionIdOfLoggedInUser(session)));
    return "redirect:/experiments";
}

/** Stops an experiment from the experiments page. */
@RequestMapping("/stop_experiment/{expId}")
public String stopExperiment(@PathVariable Integer expId, Model model, HttpSession session) {
    // stop experiment
    // ensure experiment is in ready mode before stopping
    experimentManager.stopExperiment(getSessionIdOfLoggedInUser(session), expId);
    model.addAttribute("experimentList", experimentManager.getExperimentListByExperimentOwner(getSessionIdOfLoggedInUser(session)));
    return "redirect:/experiments";
}

//---------------------------------Dataset Page--------------------------

/** Lists datasets the current user contributed or can access. */
@RequestMapping("/data")
public String data(Model model, HttpSession session) {
    model.addAttribute("datasetOwnedByUserList", datasetManager.getDatasetContributedByUser(getSessionIdOfLoggedInUser(session)));
    // NOTE(review): "getDatasetAccessibleByuser" — lowercase 'u' is in the
    // project API's method name; flagging only, no change possible here.
    model.addAttribute("datasetAccessibleByUserList", datasetManager.getDatasetAccessibleByuser(getSessionIdOfLoggedInUser(session)));
    model.addAttribute("userManager", userManager);
    return "data";
}

/** Shows the contribute-data form and the files already in the upload root. */
@RequestMapping(value="/data/contribute", method=RequestMethod.GET)
public String contributeData(Model model) {
    model.addAttribute("dataset", new Dataset());
    File rootFolder = new File(Application.ROOT);
    // NOTE(review): fileNames is computed but never used — dead local.
    List<String> fileNames = Arrays.stream(rootFolder.listFiles())
            .map(f -> f.getName())
            .collect(Collectors.toList());
    // Newest-first listing of files in the upload root.
    model.addAttribute("files",
            Arrays.stream(rootFolder.listFiles())
                    .sorted(Comparator.comparingLong(f -> -1 * f.lastModified()))
                    .map(f -> f.getName())
                    .collect(Collectors.toList())
    );
    return "contribute_data";
}

/**
 * Saves an uploaded dataset file to the upload root and registers the dataset.
 * NOTE(review): stream.close() is not in finally — leaks on copy failure.
 */
@RequestMapping(value="/data/contribute", method=RequestMethod.POST)
public String validateContributeData(@ModelAttribute("dataset") Dataset dataset, HttpSession session, @RequestParam("file") MultipartFile file, RedirectAttributes redirectAttributes) {
    // TODO
    // validation
    // get file from user upload to server
    if (!file.isEmpty()) {
        try {
            String fileName = getSessionIdOfLoggedInUser(session) + "-" + file.getOriginalFilename();
            BufferedOutputStream stream = new BufferedOutputStream(
                    new FileOutputStream(new File(Application.ROOT + "/" + fileName)));
            FileCopyUtils.copy(file.getInputStream(), stream);
            stream.close();
            redirectAttributes.addFlashAttribute("message", "You successfully uploaded " + file.getOriginalFilename() + "!");
            datasetManager.addDataset(getSessionIdOfLoggedInUser(session), dataset, file.getOriginalFilename());
        } catch (Exception e) {
            redirectAttributes.addFlashAttribute("message", "You failed to upload " + file.getOriginalFilename() + " => " + e.getMessage());
        }
    } else {
        redirectAttributes.addFlashAttribute("message", "You failed to upload " + file.getOriginalFilename() + " because the file was empty");
    }
    return "redirect:/data";
}

/** Shows the edit form for a dataset. */
@RequestMapping(value="/data/edit/{datasetId}", method=RequestMethod.GET)
public String datasetInfo(@PathVariable Integer datasetId, Model model) {
    Dataset dataset = datasetManager.getDataset(datasetId);
    model.addAttribute("editDataset", dataset);
    return "edit_data";
}

/**
 * Applies edited dataset fields onto the stored dataset, flashing a per-field
 * "success" flag for each field that actually changed.
 */
@RequestMapping(value="/data/edit/{datasetId}", method=RequestMethod.POST)
public String editDatasetInfo(@PathVariable Integer datasetId, @ModelAttribute("editDataset") Dataset dataset, final RedirectAttributes redirectAttributes) {
    Dataset origDataset = datasetManager.getDataset(datasetId);
    String editedDatasetName = dataset.getDatasetName();
    String editedDatasetDesc = dataset.getDatasetDescription();
    String editedDatasetLicense = dataset.getLicense();
    String editedDatasetPublic = dataset.getIsPublic();
    boolean editedDatasetIsRequiredAuthorization = dataset.getRequireAuthorization();
    // NOTE(review): debug prints — should go through LOGGER instead.
    System.out.println(origDataset.getDatasetId());
    System.out.println(dataset.getDatasetId());
    if (origDataset.updateName(editedDatasetName) == true) {
        redirectAttributes.addFlashAttribute("editName", "success");
    }
    if (origDataset.updateDescription(editedDatasetDesc) == true) {
        redirectAttributes.addFlashAttribute("editDesc", "success");
    }
    if (origDataset.updateLicense(editedDatasetLicense) == true) {
        redirectAttributes.addFlashAttribute("editLicense", "success");
    }
    if (origDataset.updatePublic(editedDatasetPublic) == true) {
        redirectAttributes.addFlashAttribute("editPublic", "success");
    }
    if (origDataset.updateAuthorization(editedDatasetIsRequiredAuthorization) == true) {
        redirectAttributes.addFlashAttribute("editIsRequiredAuthorization", "success");
    }
    datasetManager.updateDatasetDetails(origDataset);
    return "redirect:/data/edit/{datasetId}";
}

/** Deletes a dataset. */
@RequestMapping("/data/remove_dataset/{datasetId}")
public String removeDataset(@PathVariable Integer datasetId) {
    datasetManager.removeDataset(datasetId);
    return "redirect:/data";
}

/** Lists public datasets. */
@RequestMapping("/data/public")
public String openDataset(Model model) {
    model.addAttribute("publicDataMap", datasetManager.getDatasetMap());
    model.addAttribute("userManager", userManager);
    return "data_public";
}

/** Shows the data owner's contact details so the user can request access. */
@RequestMapping("/data/public/request_access/{dataOwnerId}")
public String requestAccessForDataset(@PathVariable Integer dataOwnerId, Model model) {
    // TODO
    // send request to team owner
    // show feedback to users
    User rv = userManager.getUserById(dataOwnerId);
    model.addAttribute("ownerName", rv.getName());
    model.addAttribute("ownerEmail", rv.getEmail());
    return "data_request_access";
}

//---------------------------------Admin---------------------------------

/** Admin overview: domains, users, teams, experiments, datasets and nodes. */
@RequestMapping("/admin")
public String admin(Model model) {
    model.addAttribute("domain", new Domain());
    model.addAttribute("domainTable", domainManager.getDomainTable());
    model.addAttribute("usersMap", userManager.getUserMap());
    model.addAttribute("teamsMap", teamManager.getTeamMap());
    model.addAttribute("teamManager", teamManager);
    model.addAttribute("teamsPendingApprovalMap", teamManager.getTeamsPendingApproval());
    model.addAttribute("experimentMap", experimentManager.getExperimentMap2());
    model.addAttribute("totalTeamCount", teamManager.getTotalTeamsCount());
    model.addAttribute("totalExpCount", experimentManager.getTotalExpCount());
    model.addAttribute("totalMemberCount", teamManager.getTotalMembersCount());
    model.addAttribute("totalMemberAwaitingApprovalCount", teamManager.getTotalMembersAwaitingApproval());
    model.addAttribute("datasetMap", datasetManager.getDatasetMap());
    model.addAttribute("userManager", userManager);
    model.addAttribute("nodeMap", nodeManager.getNodeMap());
    return "admin";
}

/** Adds a domain from the admin page. NOTE: body continues beyond this excerpt. */
@RequestMapping(value="/admin/domains/add", method=RequestMethod.POST)
public String addDomain(@Valid Domain domain, BindingResult bindingResult) {
    if (bindingResult.hasErrors())
{ return "redirect:/admin"; } else { domainManager.addDomains(domain.getDomainName()); } return "redirect:/admin"; } @RequestMapping("/admin/domains/remove/{domainKey}") public String removeDomain(@PathVariable String domainKey) { domainManager.removeDomains(domainKey); return "redirect:/admin"; } @RequestMapping("/admin/teams/accept/{teamId}") public String approveTeam(@PathVariable Integer teamId) { // set the approved flag to true teamManager.approveTeamApplication(teamId); return "redirect:/admin"; } @RequestMapping("/admin/teams/reject/{teamId}") public String rejectTeam(@PathVariable Integer teamId) { // need to cleanly remove the team application teamManager.rejectTeamApplication(teamId); return "redirect:/admin"; } @RequestMapping("/admin/users/ban/{userId}") public String banUser(@PathVariable Integer userId) { // TODO // perform ban action here // need to cleanly remove user info from teams, user. etc return "redirect:/admin"; } @RequestMapping("/admin/experiments/remove/{expId}") public String adminRemoveExp(@PathVariable Integer expId) { int teamId = experimentManager.getExperimentByExpId(expId).getTeamId(); experimentManager.adminRemoveExperiment(expId); // decrease exp count to be display on Teams page teamManager.decrementExperimentCount(teamId); return "redirect:/admin"; } @RequestMapping(value="/admin/data/contribute", method=RequestMethod.GET) public String adminContributeDataset(Model model) { model.addAttribute("dataset", new Dataset()); File rootFolder = new File(Application.ROOT); List<String> fileNames = Arrays.stream(rootFolder.listFiles()) .map(f -> f.getName()) .collect(Collectors.toList()); model.addAttribute("files", Arrays.stream(rootFolder.listFiles()) .sorted(Comparator.comparingLong(f -> -1 * f.lastModified())) .map(f -> f.getName()) .collect(Collectors.toList()) ); return "admin_contribute_data"; } @RequestMapping(value="/admin/data/contribute", method=RequestMethod.POST) public String 
validateAdminContributeDataset(@ModelAttribute("dataset") Dataset dataset, HttpSession session, @RequestParam("file") MultipartFile file, RedirectAttributes redirectAttributes) { // TODO // validation // get file from user upload to server if (!file.isEmpty()) { try { String fileName = getSessionIdOfLoggedInUser(session) + "-" + file.getOriginalFilename(); BufferedOutputStream stream = new BufferedOutputStream( new FileOutputStream(new File(Application.ROOT + "/" + fileName))); FileCopyUtils.copy(file.getInputStream(), stream); stream.close(); redirectAttributes.addFlashAttribute("message", "You successfully uploaded " + file.getOriginalFilename() + "!"); datasetManager.addDataset(getSessionIdOfLoggedInUser(session), dataset, file.getOriginalFilename()); } catch (Exception e) { redirectAttributes.addFlashAttribute("message", "You failed to upload " + file.getOriginalFilename() + " => " + e.getMessage()); } } else { redirectAttributes.addFlashAttribute("message", "You failed to upload " + file.getOriginalFilename() + " because the file was empty"); } return "redirect:/admin"; } @RequestMapping("/admin/data/remove/{datasetId}") public String adminRemoveDataset(@PathVariable Integer datasetId) { datasetManager.removeDataset(datasetId); return "redirect:/admin"; } @RequestMapping(value="/admin/node/add", method=RequestMethod.GET) public String adminAddNode(Model model) { model.addAttribute("node", new Node()); return "admin_add_node"; } @RequestMapping(value="/admin/node/add", method=RequestMethod.POST) public String adminAddNode(@ModelAttribute("node") Node node) { // TODO // validate fields, eg should be integer nodeManager.addNode(node); return "redirect:/admin"; } //--------------------------Static pages for teams-------------------------- @RequestMapping("/teams/team_application_submitted") public String teamAppSubmitFromTeamsPage() { return "team_page_application_submitted"; } @RequestMapping("/teams/join_application_submitted/{teamId}") public String 
teamAppJoinFromTeamsPage(@PathVariable Integer teamId, Model model) { int teamOwnerId = teamManager.getTeamByTeamId(teamId).getTeamOwnerId(); model.addAttribute("teamOwner", userManager.getUserById(teamOwnerId)); return "team_page_join_application_submitted"; } //--------------------------Static pages for sign up-------------------------- @RequestMapping("/team_application_submitted") public String teamAppSubmit(Model model) { model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "team_application_submitted"; } @RequestMapping("/join_application_submitted") public String joinTeamAppSubmit(Model model) { model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "join_team_application_submitted"; } @RequestMapping("/email_not_validated") public String emailNotValidated(Model model) { model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "email_not_validated"; } @RequestMapping("/team_application_under_review") public String teamAppUnderReview(Model model) { model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "team_application_under_review"; } @RequestMapping("/join_application_awaiting_approval") public String joinTeamAppAwaitingApproval(Model model) { model.addAttribute("loginForm", new LoginForm()); model.addAttribute("signUpMergedForm", new SignUpMergedForm()); return "join_team_application_awaiting_approval"; } //--------------------------Get List of scenarios filenames-------------------------- private List<String> getScenarioFileNameList() { List<String> scenarioFileNameList = new ArrayList<String>(); File[] files = new File(SCENARIOS_DIR_PATH).listFiles(); for (File file : files) { if (file.isFile()) { scenarioFileNameList.add(file.getName()); } } return scenarioFileNameList; } //---Check if user is 
a team owner and has any join request waiting for approval---- private boolean hasAnyJoinRequest(HashMap<Integer, Team> teamMapOwnedByUser) { for (Map.Entry<Integer, Team> entry : teamMapOwnedByUser.entrySet()) { Team currTeam = entry.getValue(); if (currTeam.isUserJoinRequestEmpty() == false) { // at least one team has join user request return true; } } // loop through all teams but never return a single true // therefore, user's controlled teams has no join request return false; } //--------------------------MISC-------------------------- public int getSessionIdOfLoggedInUser(HttpSession session) { return Integer.parseInt(session.getAttribute(SESSION_LOGGED_IN_USER_ID).toString()); } public User2 extractUserInfo(String userJson) { User2 user2 = new User2(); // Gson g = new Gson(); // Map<String, Object> javaRootMapObject = g.fromJson(userJson, Map.class); // // for (Map.Entry<String, Object> entry : javaRootMapObject.entrySet()) // { // String key = entry.getKey(); // Object data = entry.getValue(); // // if (key.equals("id")) { // user2.setId(data.toString()); // } else if (key.equals("userDetails")) { // // // // } // } // System.out.println((Map) javaRootMapObject.get("userDetails")); // user2.setId(javaRootMapObject.get("id").toString()); // user2.setEmail(javaRootMapObject.get("email").toString()); JSONObject object = new JSONObject(userJson); JSONObject userDetails = object.getJSONObject("userDetails"); JSONObject address = userDetails.getJSONObject("address"); user2.setId(object.getString("id")); user2.setFirstName(userDetails.getString("firstName")); user2.setLastName(userDetails.getString("lastName")); user2.setEmail(userDetails.getString("email")); user2.setPhone(userDetails.getString("phone")); user2.setAddress1(address.getString("address1")); user2.setAddress2(address.getString("address2")); user2.setCountry(address.getString("country")); user2.setRegion(address.getString("region")); user2.setZipCode(address.getString("zipCode")); return user2; } }
refactor code (#DEV-237)
src/main/java/sg/ncl/MainController.java
refactor code (#DEV-237)
<ide><path>rc/main/java/sg/ncl/MainController.java <ide> import javax.servlet.http.HttpSession; <ide> import javax.validation.Valid; <ide> <del> <del>import com.fasterxml.jackson.databind.util.JSONPObject; <ide> import org.apache.tomcat.util.codec.binary.Base64; <ide> import org.json.JSONObject; <ide> import org.springframework.http.*; <ide> // Need to be this way to "edit" details <ide> // If not, the form details will overwrite existing user's details <ide> <del> String firstName = editUser.getFirstName(); <del> String lastName = editUser.getLastName(); <del> <ide> JSONObject object = new JSONObject(); <ide> JSONObject userDetails = new JSONObject(); <ide> JSONObject address = new JSONObject(); <ide> <del> userDetails.put("firstName", firstName); <del> userDetails.put("lastName", lastName); <add> userDetails.put("firstName", editUser.getFirstName()); <add> userDetails.put("lastName", editUser.getLastName()); <ide> userDetails.put("email", editUser.getEmail()); <ide> userDetails.put("phone", editUser.getPhone()); <ide> userDetails.put("address", address);
Java
mit
133871736a2ff92e9bd6395bc9c0a11a36a9f849
0
fredyw/leetcode,fredyw/leetcode,fredyw/leetcode,fredyw/leetcode
package leetcode; import java.util.Arrays; /** * https://leetcode.com/problems/pour-water/ */ public class Problem756 { public int[] pourWater(int[] heights, int V, int K) { outer: for (int i = 0; i < V; i++) { // left for (int j = K; j >= 0; j--) { int left = (j - 1 < 0) ? Integer.MIN_VALUE : heights[j - 1]; if (heights[j] < left) { heights[j]++; continue outer; } } // right for (int j = K; j < heights.length; j++) { int right = (j + 1 < heights.length) ? heights[j + 1] : Integer.MIN_VALUE; if (heights[j] < right) { heights[j]++; continue outer; } } heights[K]++; } return heights; } public static void main(String[] args) { Problem756 prob = new Problem756(); // System.out.println(Arrays.toString(prob.pourWater(new int[]{2, 1, 2, 2, 1, 2, 2}, 1, 3))); // [2,2,2,2,1,2,2] // System.out.println(Arrays.toString(prob.pourWater(new int[]{2, 1, 1, 2, 1, 2, 2}, 4, 3))); // [2,2,2,3,2,2,2] System.out.println(Arrays.toString(prob.pourWater(new int[]{2, 1, 1, 2, 1, 2, 2}, 1, 3))); // [2,1,2,2,1,2,2] // System.out.println(Arrays.toString(prob.pourWater(new int[]{1, 2, 3, 4}, 2, 2))); // [2,3,3,4] // System.out.println(Arrays.toString(prob.pourWater(new int[]{3, 1, 3}, 5, 1))); // [4,4,4] System.out.println(Arrays.toString(prob.pourWater(new int[]{2, 1, 2, 3, 1, 2, 2}, 1, 3))); // [2,2,2,3,1,2,2] } }
src/main/java/leetcode/Problem756.java
package leetcode; import java.util.Arrays; /** * https://leetcode.com/problems/pour-water/ */ public class Problem756 { public int[] pourWater(int[] heights, int V, int K) { outer: for (int i = 0; i < V; i++) { // left for (int j = K; j >= 0; j--) { int left = (j - 1 < 0) ? Integer.MIN_VALUE : heights[j - 1]; if (heights[j] < left) { heights[j]++; continue outer; } } // right for (int j = K; j < heights.length; j++) { int right = (j + 1 < heights.length) ? heights[j + 1] : Integer.MIN_VALUE; if (heights[j] < right) { heights[j]++; continue outer; } } heights[K]++; } return heights; } public static void main(String[] args) { Problem756 prob = new Problem756(); // System.out.println(Arrays.toString(prob.pourWater(new int[] { 2, 1, 2, 2, 1, 2, 2 }, 1, 3))); // [2,2,2,2,1,2,2] // System.out.println(Arrays.toString(prob.pourWater(new int[] { 2, 1, 1, 2, 1, 2, 2 }, 4, 3))); // [2,2,2,3,2,2,2] System.out.println(Arrays.toString(prob.pourWater(new int[] { 2, 1, 1, 2, 1, 2, 2 }, 1, 3))); // [2,1,2,2,1,2,2] // System.out.println(Arrays.toString(prob.pourWater(new int[] { 1, 2, 3, 4 }, 2, 2))); // [2,3,3,4] // System.out.println(Arrays.toString(prob.pourWater(new int[] { 3, 1, 3 }, 5, 1))); // [4,4,4] } }
Update problem 756
src/main/java/leetcode/Problem756.java
Update problem 756
<ide><path>rc/main/java/leetcode/Problem756.java <ide> <ide> public static void main(String[] args) { <ide> Problem756 prob = new Problem756(); <del>// System.out.println(Arrays.toString(prob.pourWater(new int[] { 2, 1, 2, 2, 1, 2, 2 }, 1, 3))); // [2,2,2,2,1,2,2] <del>// System.out.println(Arrays.toString(prob.pourWater(new int[] { 2, 1, 1, 2, 1, 2, 2 }, 4, 3))); // [2,2,2,3,2,2,2] <del> System.out.println(Arrays.toString(prob.pourWater(new int[] { 2, 1, 1, 2, 1, 2, 2 }, 1, 3))); // [2,1,2,2,1,2,2] <del>// System.out.println(Arrays.toString(prob.pourWater(new int[] { 1, 2, 3, 4 }, 2, 2))); // [2,3,3,4] <del>// System.out.println(Arrays.toString(prob.pourWater(new int[] { 3, 1, 3 }, 5, 1))); // [4,4,4] <add>// System.out.println(Arrays.toString(prob.pourWater(new int[]{2, 1, 2, 2, 1, 2, 2}, 1, 3))); // [2,2,2,2,1,2,2] <add>// System.out.println(Arrays.toString(prob.pourWater(new int[]{2, 1, 1, 2, 1, 2, 2}, 4, 3))); // [2,2,2,3,2,2,2] <add> System.out.println(Arrays.toString(prob.pourWater(new int[]{2, 1, 1, 2, 1, 2, 2}, 1, 3))); // [2,1,2,2,1,2,2] <add>// System.out.println(Arrays.toString(prob.pourWater(new int[]{1, 2, 3, 4}, 2, 2))); // [2,3,3,4] <add>// System.out.println(Arrays.toString(prob.pourWater(new int[]{3, 1, 3}, 5, 1))); // [4,4,4] <add> System.out.println(Arrays.toString(prob.pourWater(new int[]{2, 1, 2, 3, 1, 2, 2}, 1, 3))); // [2,2,2,3,1,2,2] <ide> } <ide> }
Java
mit
error: pathspec 'Java/array2/ZeroMaxTest.java' did not match any file(s) known to git
c7641dc73e9c698528ca69d5e727adf9e919cb34
1
RCoon/CodingBat,RCoon/CodingBat
package array2; import java.util.Arrays; public class ZeroMaxTest { public static void main(String[] args) { ZeroMaxTest test = new ZeroMaxTest(); System.out.println(Arrays.toString(test.zeroMax(new int[] { 0, 5, 0, 3 }))); System.out.println(Arrays.toString(test.zeroMax(new int[] { 0, 4, 0, 3 }))); System.out.println(Arrays.toString(test.zeroMax(new int[] { 0, 1, 0 }))); } public int[] zeroMax(int[] nums) { if (nums.length < 2) return nums; int[] array = new int[nums.length]; int maxOdd = 0; for (int i = nums.length - 1; i > -1; i--) { if (nums[i] % 2 != 0 && nums[i] > maxOdd) { maxOdd = nums[i]; } if (nums[i] != 0) { array[i] = nums[i]; } else { array[i] = maxOdd; } } return array; } }
Java/array2/ZeroMaxTest.java
Add ZeroMaxTest
Java/array2/ZeroMaxTest.java
Add ZeroMaxTest
<ide><path>ava/array2/ZeroMaxTest.java <add>package array2; <add> <add>import java.util.Arrays; <add> <add>public class ZeroMaxTest { <add> <add> public static void main(String[] args) { <add> <add> ZeroMaxTest test = new ZeroMaxTest(); <add> System.out.println(Arrays.toString(test.zeroMax(new int[] { 0, 5, 0, 3 }))); <add> System.out.println(Arrays.toString(test.zeroMax(new int[] { 0, 4, 0, 3 }))); <add> System.out.println(Arrays.toString(test.zeroMax(new int[] { 0, 1, 0 }))); <add> } <add> <add> public int[] zeroMax(int[] nums) { <add> if (nums.length < 2) <add> return nums; <add> <add> int[] array = new int[nums.length]; <add> <add> int maxOdd = 0; <add> <add> for (int i = nums.length - 1; i > -1; i--) { <add> if (nums[i] % 2 != 0 && nums[i] > maxOdd) { <add> maxOdd = nums[i]; <add> } <add> if (nums[i] != 0) { <add> array[i] = nums[i]; <add> } else { <add> array[i] = maxOdd; <add> } <add> } <add> return array; <add> } <add>}
Java
bsd-3-clause
68c002187bc5c4bfc104119803009d14db7718df
0
compgen-io/cgpipe,compgen-io/cgpipe
package io.compgen.cgpipe.runner; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; import java.nio.file.AccessMode; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import io.compgen.cgpipe.CGPipe; import io.compgen.cgpipe.exceptions.ASTExecException; import io.compgen.cgpipe.exceptions.ASTParseException; import io.compgen.cgpipe.exceptions.RunnerException; import io.compgen.cgpipe.exceptions.VarTypeException; import io.compgen.cgpipe.loader.NumberedLine; import io.compgen.cgpipe.parser.context.RootContext; import io.compgen.cgpipe.parser.target.BuildTarget; import io.compgen.cgpipe.parser.variable.VarBool; import io.compgen.cgpipe.parser.variable.VarList; import io.compgen.cgpipe.parser.variable.VarString; import io.compgen.cgpipe.parser.variable.VarValue; import io.compgen.cgpipe.runner.joblog.JobLog; import io.compgen.cgpipe.runner.joblog.JobLogRecord; import io.compgen.cgpipe.support.StreamRedirect; import io.compgen.common.MapBuilder; import io.compgen.common.StringUtils; public abstract class JobRunner { abstract public boolean submit(JobDef jobdef) throws RunnerException; abstract public boolean isJobIdValid(String jobId) throws RunnerException; abstract public void runnerDone() throws RunnerException; abstract protected void setConfig(String k, VarValue varValue); public static String defaultShell = null; static { for (String path: new String[] {"/bin/bash", "/usr/bin/bash", "/usr/local/bin/bash", "/bin/sh"}) { if (new File(path).exists()) { defaultShell=path; break; } } } static protected Log log = LogFactory.getLog(JobRunner.class); protected boolean dryrun = false; protected boolean done = false; protected JobLog joblog = null; protected Map<String, JobDependency> submittedJobs = new HashMap<String, 
JobDependency>(); // key = output-file, value = job-id protected List<JobDependency> submittedJobDefs = new ArrayList<JobDependency>(); protected RootContext rootContext = null; protected JobDef setupJob = null; protected List<NumberedLine> prelines=null; protected List<NumberedLine> postlines=null; // private List<NumberedLine> postSubmitLines=null; protected List<String> outputFilesSubmitted = new ArrayList<String>(); protected List<String> tempOutputFilesSubmitted = new ArrayList<String>(); public static JobRunner load(RootContext cxt) throws RunnerException { boolean dryrun = (cxt.get("cgpipe.dryrun") == VarBool.TRUE); String runner = cxt.getString("cgpipe.runner"); if (runner == null) { runner = "shell"; } if (cxt.contains("cgpipe.shell")) { defaultShell = cxt.getString("cgpipe.shell"); } JobRunner.log.info("job-runner: " +runner); JobRunner obj = null; switch (runner) { case "shell": case "bash": case "sh": obj = new ShellScriptRunner(); break; case "sge": obj = new SGETemplateRunner(); break; case "slurm": obj = new SLURMTemplateRunner(); break; case "pbs": obj = new PBSTemplateRunner(); break; case "sbs": obj = new SBSTemplateRunner(); break; case "graphviz": obj = new GraphvizRunner(); break; default: throw new RunnerException("Can't load job runner: "+runner +" (valid options: shell, sge, slurm, pbs, sjq, graphviz)"); } obj.rootContext = cxt; String prefix = "cgpipe.runner."+runner; Map<String, VarValue> cxtvals = cxt.cloneValues(prefix); for (String k: cxtvals.keySet()) { obj.setConfig(k, cxtvals.get(k)); } obj.dryrun = dryrun; // Attempt to load a list of existing jobs String joblogFilename = cxt.getString("cgpipe.joblog"); JobRunner.log.info("job-log: " +joblogFilename); if (joblogFilename != null) { // try { JobLog jl = null; try { jl = JobLog.open(joblogFilename); } catch (IOException e) { throw new RunnerException(e); } for (String output: jl.getOutputJobIds().keySet()) { String jobId = jl.getOutputJobIds().get(output); String absOutput = 
Paths.get(output).toAbsolutePath().toString(); obj.submittedJobs.put(absOutput, new ExistingJob(jobId)); cxt.getRoot().addPendingJobOutput(absOutput, jobId, obj); log.trace("Existing/pending output: "+ absOutput); } // File jobfile = new File(joblog); // if (jobfile.exists()) { // BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(joblog))); // String line; // while ((line = reader.readLine()) != null) { // String[] cols = line.split("\t"); // if (cols[1].equals("OUTPUT")) { // String absOutput = Paths.get(cols[2]).toAbsolutePath().toString(); // obj.submittedJobs.put(absOutput, new ExistingJob(cols[0])); // cxt.getRoot().addPendingJobOutput(absOutput, cols[0], obj); // log.trace("Existing/pending output: "+ absOutput); // } // } // reader.close(); // } else if (jobfile.getParentFile() != null && !jobfile.getParentFile().exists()) { // jobfile.getParentFile().mkdirs(); // } obj.joblog = jl; JobRunner.log.debug("done reading job-log: " +joblogFilename); // new PrintStream(new FileOutputStream(joblog, true)); // } catch (IOException e) { // throw new RunnerException(e); // } } return obj; } public void abort() { } protected void shexec(JobDef jobdef) throws RunnerException { if (dryrun) { System.err.println("[dryrun." 
+ jobdef.getSafeName() +"]"); for (String line: jobdef.getBody().split("\n")) { System.err.println("> " + line); } } else { try { log.trace("shexec: "+jobdef.getSafeName()); Process proc = Runtime.getRuntime().exec(new String[] { defaultShell }); proc.getOutputStream().write(jobdef.getBody().getBytes(Charset.forName("UTF8"))); proc.getOutputStream().close(); InputStream is = proc.getInputStream(); InputStream es = proc.getErrorStream(); StreamRedirect t1 = new StreamRedirect(is, System.out); t1.start(); StreamRedirect t2 = new StreamRedirect(es, System.err); t2.start(); int retcode = proc.waitFor(); t1.join(); t2.join(); log.trace("retcode: "+retcode); is.close(); es.close(); if (retcode != 0) { throw new RunnerException("Error running job via shexec: "+jobdef.getName()+" $? = "+retcode+"\n"+jobdef.getBody()); } } catch (IOException | InterruptedException e) { throw new RunnerException(e); } } } // protected void shexec(String src) throws RunnerException { // try { // Process proc = Runtime.getRuntime().exec(new String[] { defaultShell }); // proc.getOutputStream().write(src.getBytes(Charset.forName("UTF8"))); // proc.getOutputStream().close(); // // InputStream is = proc.getInputStream(); // InputStream es = proc.getErrorStream(); // // StreamRedirect t1 = new StreamRedirect(is, System.out); // t1.start(); // // StreamRedirect t2 = new StreamRedirect(is, System.err); // t2.start(); // // int retcode = proc.waitFor(); // t1.join(); // t2.join(); // // log.trace("retcode: "+retcode); // // is.close(); // es.close(); // // // don't close stdout/stderr, it stops the program. 
// //fout.close(); // //ferr.close(); // // if (retcode != 0) { // throw new RunnerException("Error running script!"); // } // // } catch (IOException | InterruptedException e) { // throw new RunnerException(e); // } // } private List<NumberedLine> getLinesForTarget(String name, RootContext context, boolean allowMissing) { BuildTarget tgt = context.build(name, allowMissing); List<NumberedLine> lines = null; if (tgt != null) { lines = tgt.getLines(); } return lines; } private void setup(RootContext context) throws RunnerException { if (setupJob == null) { BuildTarget setupTgt = context.build("__setup__", true); if (setupTgt != null) { try { setupJob = setupTgt.eval(null, null, context); if (setupJob.getSettingBool("job.shexec", false)) { shexec(setupJob); } else { submit(setupJob); if (setupJob.getJobId() == null) { abort(); log.error("Error submitting job: __setup__"); throw new RunnerException("Error submitting job: __setup__"); } postSubmit(setupJob, context); } } catch (ASTParseException | ASTExecException e) { throw new RunnerException(e); } } prelines = getLinesForTarget("__pre__", context, true); postlines = getLinesForTarget("__post__", context, true); } } public void submitAll(BuildTarget initialTarget, RootContext context) throws RunnerException { if (initialTarget.getOutputs() != null && initialTarget.getOutputs().size() > 0) { setup(context); markSkippable(initialTarget, context, initialTarget.getOutputs().get(0)); submitTargets(initialTarget, context, initialTarget.getOutputs().get(0), true); } runOpportunistic(context); } private void runOpportunistic(RootContext context) throws RunnerException { List<BuildTarget> opp = context.getOpportunistic(); boolean foundAll = true; for (BuildTarget tgt: opp) { for (String k: tgt.getDepends().keySet()) { // if the file isn't skippable (exists on disk) if (!tgt.getDepends().get(k).isSkippable()) { // if we haven't submitted the job if (tgt.getDepends().get(k).getJobDep() == null) { // if the job hasn't been 
previously scheduled if (findJobProviding(k) == null) { // we can't run this opportunistic job foundAll = false; continue; } } } } if (!foundAll) { continue; } try { JobDef job = tgt.eval(null, null, context); if (job.getSettingBool("job.shexec", false)) { shexec(job); } else { submit(job); } } catch (ASTParseException | ASTExecException e) { throw new RunnerException(e); } } } private long markSkippable(BuildTarget target, RootContext context, String outputName) throws RunnerException { long lastModified = 0; String lastModifiedDep = ""; log.debug("MARKING SKIPPABLE FOR: "+ target); for (String dep: target.getDepends().keySet()) { long depLastMod = markSkippable(target.getDepends().get(dep), context, dep); log.debug(" Checking dep: " + dep + " lastmod: "+depLastMod); if (depLastMod == -1) { lastModified = -1; } else if (depLastMod > lastModified && lastModified > -1) { lastModified = depLastMod; lastModifiedDep = dep; } } log.debug("LAST MODIFIED: "+ target + " => " + lastModified); long retval = 0; if (lastModified > -1) { // Check to see if the outputName file exists on disk. // Note: this could also be used to look for remote resources (S3, etc), but not implemented for (String allout: target.getOutputs()) { File outputFile = new File(allout); // TODO: This can fail for NFS mounted folders -- Add an extra check here for IOExceptions? outputFile.toPath().checkAccess()?? 
if (outputFile.exists()) { if (outputFile.lastModified() >= lastModified) { log.debug(" Marking output-target as skippable: "+allout); target.setSkippable(allout); if (retval != -1 && outputFile.lastModified() > retval) { retval = outputFile.lastModified(); } } else { log.debug(" Marking output-target as not skippable: " + allout + " is older than " + lastModifiedDep + " (" + outputFile.lastModified() + " vs " + lastModified + ")"); retval = -1; } } else { if (target.getTempOutputs().contains(allout)) { log.debug(outputFile + " is a tmp file -- we can skip this (assuming downstream files are older than: "+lastModified+")"); return lastModified; } else { log.debug(" Marking output-target as not skippable: " + allout + " doesn't exist! (" + outputFile.getAbsolutePath()+")"); retval = -1; } } } } else { log.debug(" Marking output-target as not skippable: "+outputName + " a dependency will be built"); retval = -1; } log.debug("DONE: "+ target + " => " + retval); return retval; } private JobDependency submitTargets(BuildTarget target, RootContext context, String outputName, boolean isRoot) throws RunnerException { log.trace("Submitting target: "+outputName); // Can we skip this target (file exists) if (target.isSkippable()) { log.trace("Skipping target: "+outputName); return null; } // Has it already been submitted in another part of the tree? if (target.getJobDep() != null) { log.trace("Skipping target (already submitted): "+outputName); return target.getJobDep(); } // Have we already submitted this job in a prior run? JobDependency depJob = findJobProviding(outputName); if (depJob != null) { log.trace("Skipping target (job queued): "+outputName); return depJob; } // Okay... we are submitting this job, start with submitting it's dependencies... 
List<JobDependency> deps = new ArrayList<JobDependency>(); try { JobDef job = target.eval(prelines, postlines, context); if (job != null) { boolean blankRoot = false; if (isRoot) { String tmp = job.getBody().replaceAll("[ \\t\\r\\n]", ""); if (tmp.equals("")) { blankRoot = true; } } for (String out: target.getDepends().keySet()) { log.info("Submitting dependency: "+out); JobDependency dep = submitTargets(target.getDepends().get(out), context, out, blankRoot); if (dep != null) { deps.add(dep); } else { log.debug("Dependency not found?: "+out); } } job.addDependencies(deps); if (setupJob != null && setupJob.getJobId() != null) { job.addDependency(setupJob); } if (!blankRoot) { if (job.getDependencies().size()==0 && job.getSettingBool("job.shexec", false)) { shexec(job); } else { submit(job); if (job.getJobId() == null) { abort(); log.error("Error submitting job: "+ target); throw new RunnerException("Error submitting job: "+job); } postSubmit(job, context); this.outputFilesSubmitted.addAll(target.getOutputs()); this.tempOutputFilesSubmitted.addAll(target.getTempOutputs()); for (String out: target.getOutputs()) { submittedJobs.put(Paths.get(out).toAbsolutePath().toString(), job); } } } else { log.debug("Skipping empty target: "+target); job.setJobId(""); } target.setSubmittedJobDep(job); this.submittedJobDefs.add(job); } else { log.debug("Empty job for target: "+target); } return job; } catch (ASTParseException | ASTExecException e) { abort(); throw new RunnerException(e); } } private JobDependency findJobProviding(String input) throws RunnerException { log.trace("Looking for output: "+ input); String absInput = Paths.get(input).toAbsolutePath().toString(); if (submittedJobs.containsKey(absInput)) { log.debug("Found existing job providing: "+ absInput + " ("+submittedJobs.get(absInput).getJobId()+")"); JobDependency job = submittedJobs.get(absInput); if (isJobIdValid(job.getJobId())) { return job; } log.debug("Existing job: "+ job.getJobId()+" is no longer valid... 
resubmitting"); submittedJobs.remove(absInput); } return null; } public void done() throws RunnerException { // look for a __teardown__ target and execute if found. JobDef teardown = null; // TODO: Move this lower? And add all of the job defs to the context? // (Like -- show the final outputs and temp. files...) BuildTarget tdTgt = rootContext.build("__teardown__", true); if (tdTgt!=null) { boolean teardownBlank = false; try { //System.err.println("ALL OUTPUTS : "+StringUtils.join(",",outputFilesSubmitted)); //System.err.println("TEMP-OUTPUTS: "+StringUtils.join(",",tempOutputFilesSubmitted)); MapBuilder<String, VarValue> mb = new MapBuilder<String, VarValue>(); VarString[] tmpFileVar = new VarString[tempOutputFilesSubmitted.size()]; for (int i=0; i<tempOutputFilesSubmitted.size(); i++) { tmpFileVar[i] = new VarString(tempOutputFilesSubmitted.get(i)); } try { mb.put("cgpipe.tmpfiles", new VarList(tmpFileVar)); } catch (VarTypeException e) { throw new RunnerException(e); } VarString[] tmpFileVar2 = new VarString[outputFilesSubmitted.size()]; for (int i=0; i<outputFilesSubmitted.size(); i++) { tmpFileVar2[i] = new VarString(outputFilesSubmitted.get(i)); } try { mb.put("cgpipe.outputfiles", new VarList(tmpFileVar2)); } catch (VarTypeException e) { throw new RunnerException(e); } teardown = tdTgt.eval(null, null, rootContext, mb.build()); String tmp = teardown.getBody().replaceAll("[ \\t\\r\\n]", ""); if (tmp.equals("")) { teardownBlank = true; } } catch (ASTParseException | ASTExecException e) { throw new RunnerException(e); } if (!teardownBlank) { if (teardown.getSettingBool("job.shexec", false)) { shexec(teardown); } else { teardown.addDependencies(submittedJobDefs); if (setupJob != null && setupJob.getJobId() != null) { teardown.addDependency(setupJob); } submit(teardown); if (teardown.getJobId() == null) { abort(); log.error("Error submitting job: __teardown__"); throw new RunnerException("Error submitting job: __teardown__"); } postSubmit(teardown, rootContext); 
} } } runnerDone(); if (joblog!=null) { joblog.close(); } } protected void logJob(JobDef job) { log.info("Submitted job: "+job.getJobId() +" "+ job.getName()); for (String k:job.getSettings()) { if (k.startsWith("job.")) { log.debug("setting: "+k+" => "+job.getSetting(k)); } } for (String out:job.getOutputs()) { log.debug("output: "+out); } for (String inp:job.getInputs()) { log.debug("input: "+inp); } for (String s: job.getBody().split("\n")) { log.debug("src: "+StringUtils.strip(s)); } if (!dryrun && joblog != null && job.getJobId() != null && !job.getJobId().equals("")) { JobLogRecord rec = new JobLogRecord(job.getJobId()); rec.setPipeline(CGPipe.getFilename()); rec.setWorkingDirectory(CGPipe.getWorkingDirectory()); rec.setName(job.getName()); rec.setSubmitTime(System.currentTimeMillis()); rec.setUser(System.getProperty("user.name")); for (JobDependency dep:job.getDependencies()) { if (job.getJobId()!=null && !job.getJobId().equals("")) { rec.addDep(dep.getJobId()); } } for (String out:job.getOutputs()) { rec.addOutput(out); } for (String inp:job.getInputs()) { rec.addInput(inp); } for (String s: job.getBody().split("\n")) { rec.addSrcLine(s); } for (String k:job.getSettings()) { if (k.startsWith("job.")) { if (k.equals("job.custom")) { for (String s: job.getSettings("job.custom")) { rec.addSetting(k, s); } } else { rec.addSetting(k, job.getSetting(k)); } } } joblog.writeRecord(rec); } } public void postSubmit(JobDef jobdef, RootContext context) throws RunnerException { BuildTarget postSubmitTgt = context.build("__postsubmit__", true); if (postSubmitTgt != null) { try { RootContext jobRoot = new RootContext(); for (String setting: jobdef.getSettings()) { if (setting.startsWith("job.")) { jobRoot.set(setting, jobdef.getSettingsMap().get(setting)); } } jobRoot.set("job.id", new VarString(jobdef.getJobId())); String deps = ""; for (JobDependency dep: jobdef.getDependencies()) { if (!deps.equals("")) { deps += ":"; } deps += dep.getJobId(); } 
jobRoot.set("job.depids", new VarString(deps)); JobDef postSubmit = postSubmitTgt.eval(null, null, null, jobRoot.cloneValues()); //System.err.println(postSubmit.getBody()); shexec(postSubmit); } catch (ASTParseException | ASTExecException e) { throw new RunnerException(e); } } } }
src/java/io/compgen/cgpipe/runner/JobRunner.java
package io.compgen.cgpipe.runner; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import io.compgen.cgpipe.CGPipe; import io.compgen.cgpipe.exceptions.ASTExecException; import io.compgen.cgpipe.exceptions.ASTParseException; import io.compgen.cgpipe.exceptions.RunnerException; import io.compgen.cgpipe.exceptions.VarTypeException; import io.compgen.cgpipe.loader.NumberedLine; import io.compgen.cgpipe.parser.context.RootContext; import io.compgen.cgpipe.parser.target.BuildTarget; import io.compgen.cgpipe.parser.variable.VarBool; import io.compgen.cgpipe.parser.variable.VarList; import io.compgen.cgpipe.parser.variable.VarString; import io.compgen.cgpipe.parser.variable.VarValue; import io.compgen.cgpipe.runner.joblog.JobLog; import io.compgen.cgpipe.runner.joblog.JobLogRecord; import io.compgen.cgpipe.support.StreamRedirect; import io.compgen.common.MapBuilder; import io.compgen.common.StringUtils; public abstract class JobRunner { abstract public boolean submit(JobDef jobdef) throws RunnerException; abstract public boolean isJobIdValid(String jobId) throws RunnerException; abstract public void runnerDone() throws RunnerException; abstract protected void setConfig(String k, VarValue varValue); public static String defaultShell = null; static { for (String path: new String[] {"/bin/bash", "/usr/bin/bash", "/usr/local/bin/bash", "/bin/sh"}) { if (new File(path).exists()) { defaultShell=path; break; } } } static protected Log log = LogFactory.getLog(JobRunner.class); protected boolean dryrun = false; protected boolean done = false; protected JobLog joblog = null; protected Map<String, JobDependency> submittedJobs = new HashMap<String, JobDependency>(); // key = output-file, value = job-id protected 
List<JobDependency> submittedJobDefs = new ArrayList<JobDependency>(); protected RootContext rootContext = null; protected JobDef setupJob = null; protected List<NumberedLine> prelines=null; protected List<NumberedLine> postlines=null; // private List<NumberedLine> postSubmitLines=null; protected List<String> outputFilesSubmitted = new ArrayList<String>(); protected List<String> tempOutputFilesSubmitted = new ArrayList<String>(); public static JobRunner load(RootContext cxt) throws RunnerException { boolean dryrun = (cxt.get("cgpipe.dryrun") == VarBool.TRUE); String runner = cxt.getString("cgpipe.runner"); if (runner == null) { runner = "shell"; } if (cxt.contains("cgpipe.shell")) { defaultShell = cxt.getString("cgpipe.shell"); } JobRunner.log.info("job-runner: " +runner); JobRunner obj = null; switch (runner) { case "shell": case "bash": case "sh": obj = new ShellScriptRunner(); break; case "sge": obj = new SGETemplateRunner(); break; case "slurm": obj = new SLURMTemplateRunner(); break; case "pbs": obj = new PBSTemplateRunner(); break; case "sbs": obj = new SBSTemplateRunner(); break; case "graphviz": obj = new GraphvizRunner(); break; default: throw new RunnerException("Can't load job runner: "+runner +" (valid options: shell, sge, slurm, pbs, sjq, graphviz)"); } obj.rootContext = cxt; String prefix = "cgpipe.runner."+runner; Map<String, VarValue> cxtvals = cxt.cloneValues(prefix); for (String k: cxtvals.keySet()) { obj.setConfig(k, cxtvals.get(k)); } obj.dryrun = dryrun; // Attempt to load a list of existing jobs String joblogFilename = cxt.getString("cgpipe.joblog"); JobRunner.log.info("job-log: " +joblogFilename); if (joblogFilename != null) { // try { JobLog jl = null; try { jl = JobLog.open(joblogFilename); } catch (IOException e) { throw new RunnerException(e); } for (String output: jl.getOutputJobIds().keySet()) { String jobId = jl.getOutputJobIds().get(output); String absOutput = Paths.get(output).toAbsolutePath().toString(); 
obj.submittedJobs.put(absOutput, new ExistingJob(jobId)); cxt.getRoot().addPendingJobOutput(absOutput, jobId, obj); log.trace("Existing/pending output: "+ absOutput); } // File jobfile = new File(joblog); // if (jobfile.exists()) { // BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(joblog))); // String line; // while ((line = reader.readLine()) != null) { // String[] cols = line.split("\t"); // if (cols[1].equals("OUTPUT")) { // String absOutput = Paths.get(cols[2]).toAbsolutePath().toString(); // obj.submittedJobs.put(absOutput, new ExistingJob(cols[0])); // cxt.getRoot().addPendingJobOutput(absOutput, cols[0], obj); // log.trace("Existing/pending output: "+ absOutput); // } // } // reader.close(); // } else if (jobfile.getParentFile() != null && !jobfile.getParentFile().exists()) { // jobfile.getParentFile().mkdirs(); // } obj.joblog = jl; JobRunner.log.debug("done reading job-log: " +joblogFilename); // new PrintStream(new FileOutputStream(joblog, true)); // } catch (IOException e) { // throw new RunnerException(e); // } } return obj; } public void abort() { } protected void shexec(JobDef jobdef) throws RunnerException { if (dryrun) { System.err.println("[dryrun." 
+ jobdef.getSafeName() +"]"); for (String line: jobdef.getBody().split("\n")) { System.err.println("> " + line); } } else { try { log.trace("shexec: "+jobdef.getSafeName()); Process proc = Runtime.getRuntime().exec(new String[] { defaultShell }); proc.getOutputStream().write(jobdef.getBody().getBytes(Charset.forName("UTF8"))); proc.getOutputStream().close(); InputStream is = proc.getInputStream(); InputStream es = proc.getErrorStream(); StreamRedirect t1 = new StreamRedirect(is, System.out); t1.start(); StreamRedirect t2 = new StreamRedirect(es, System.err); t2.start(); int retcode = proc.waitFor(); t1.join(); t2.join(); log.trace("retcode: "+retcode); is.close(); es.close(); if (retcode != 0) { throw new RunnerException("Error running job via shexec: "+jobdef.getName()+" $? = "+retcode+"\n"+jobdef.getBody()); } } catch (IOException | InterruptedException e) { throw new RunnerException(e); } } } // protected void shexec(String src) throws RunnerException { // try { // Process proc = Runtime.getRuntime().exec(new String[] { defaultShell }); // proc.getOutputStream().write(src.getBytes(Charset.forName("UTF8"))); // proc.getOutputStream().close(); // // InputStream is = proc.getInputStream(); // InputStream es = proc.getErrorStream(); // // StreamRedirect t1 = new StreamRedirect(is, System.out); // t1.start(); // // StreamRedirect t2 = new StreamRedirect(is, System.err); // t2.start(); // // int retcode = proc.waitFor(); // t1.join(); // t2.join(); // // log.trace("retcode: "+retcode); // // is.close(); // es.close(); // // // don't close stdout/stderr, it stops the program. 
// //fout.close(); // //ferr.close(); // // if (retcode != 0) { // throw new RunnerException("Error running script!"); // } // // } catch (IOException | InterruptedException e) { // throw new RunnerException(e); // } // } private List<NumberedLine> getLinesForTarget(String name, RootContext context, boolean allowMissing) { BuildTarget tgt = context.build(name, allowMissing); List<NumberedLine> lines = null; if (tgt != null) { lines = tgt.getLines(); } return lines; } private void setup(RootContext context) throws RunnerException { if (setupJob == null) { BuildTarget setupTgt = context.build("__setup__", true); if (setupTgt != null) { try { setupJob = setupTgt.eval(null, null, context); if (setupJob.getSettingBool("job.shexec", false)) { shexec(setupJob); } else { submit(setupJob); if (setupJob.getJobId() == null) { abort(); log.error("Error submitting job: __setup__"); throw new RunnerException("Error submitting job: __setup__"); } postSubmit(setupJob, context); } } catch (ASTParseException | ASTExecException e) { throw new RunnerException(e); } } prelines = getLinesForTarget("__pre__", context, true); postlines = getLinesForTarget("__post__", context, true); } } public void submitAll(BuildTarget initialTarget, RootContext context) throws RunnerException { if (initialTarget.getOutputs() != null && initialTarget.getOutputs().size() > 0) { setup(context); markSkippable(initialTarget, context, initialTarget.getOutputs().get(0)); submitTargets(initialTarget, context, initialTarget.getOutputs().get(0), true); } runOpportunistic(context); } private void runOpportunistic(RootContext context) throws RunnerException { List<BuildTarget> opp = context.getOpportunistic(); boolean foundAll = true; for (BuildTarget tgt: opp) { for (String k: tgt.getDepends().keySet()) { // if the file isn't skippable (exists on disk) if (!tgt.getDepends().get(k).isSkippable()) { // if we haven't submitted the job if (tgt.getDepends().get(k).getJobDep() == null) { // if the job hasn't been 
previously scheduled if (findJobProviding(k) == null) { // we can't run this opportunistic job foundAll = false; continue; } } } } if (!foundAll) { continue; } try { JobDef job = tgt.eval(null, null, context); if (job.getSettingBool("job.shexec", false)) { shexec(job); } else { submit(job); } } catch (ASTParseException | ASTExecException e) { throw new RunnerException(e); } } } private long markSkippable(BuildTarget target, RootContext context, String outputName) throws RunnerException { long lastModified = 0; String lastModifiedDep = ""; log.debug("MARKING SKIPPABLE FOR: "+ target); for (String dep: target.getDepends().keySet()) { long depLastMod = markSkippable(target.getDepends().get(dep), context, dep); log.debug(" Checking dep: " + dep + " lastmod: "+depLastMod); if (depLastMod == -1) { lastModified = -1; } else if (depLastMod > lastModified && lastModified > -1) { lastModified = depLastMod; lastModifiedDep = dep; } } log.debug("LAST MODIFIED: "+ target + " => " + lastModified); long retval = 0; if (lastModified > -1) { // Check to see if the outputName file exists on disk. 
// Note: this could also be used to look for remote resources (S3, etc), but not implemented for (String allout: target.getOutputs()) { File outputFile = new File(allout); if (outputFile.exists()) { if (outputFile.lastModified() >= lastModified) { log.debug(" Marking output-target as skippable: "+allout); target.setSkippable(allout); if (retval != -1 && outputFile.lastModified() > retval) { retval = outputFile.lastModified(); } } else { log.debug(" Marking output-target as not skippable: " + allout + " is older than " + lastModifiedDep + " (" + outputFile.lastModified() + " vs " + lastModified + ")"); retval = -1; } } else { if (target.getTempOutputs().contains(allout)) { log.debug(outputFile + " is a tmp file -- we can skip this (assuming downstream files are older than: "+lastModified+")"); return lastModified; } else { log.debug(" Marking output-target as not skippable: " + allout + " doesn't exist! (" + outputFile.getAbsolutePath()+")"); retval = -1; } } } } else { log.debug(" Marking output-target as not skippable: "+outputName + " a dependency will be built"); retval = -1; } log.debug("DONE: "+ target + " => " + retval); return retval; } private JobDependency submitTargets(BuildTarget target, RootContext context, String outputName, boolean isRoot) throws RunnerException { log.trace("Submitting target: "+outputName); // Can we skip this target (file exists) if (target.isSkippable()) { log.trace("Skipping target: "+outputName); return null; } // Has it already been submitted in another part of the tree? if (target.getJobDep() != null) { log.trace("Skipping target (already submitted): "+outputName); return target.getJobDep(); } // Have we already submitted this job in a prior run? JobDependency depJob = findJobProviding(outputName); if (depJob != null) { log.trace("Skipping target (job queued): "+outputName); return depJob; } // Okay... we are submitting this job, start with submitting it's dependencies... 
List<JobDependency> deps = new ArrayList<JobDependency>(); try { JobDef job = target.eval(prelines, postlines, context); if (job != null) { boolean blankRoot = false; if (isRoot) { String tmp = job.getBody().replaceAll("[ \\t\\r\\n]", ""); if (tmp.equals("")) { blankRoot = true; } } for (String out: target.getDepends().keySet()) { log.info("Submitting dependency: "+out); JobDependency dep = submitTargets(target.getDepends().get(out), context, out, blankRoot); if (dep != null) { deps.add(dep); } else { log.debug("Dependency not found?: "+out); } } job.addDependencies(deps); if (setupJob != null && setupJob.getJobId() != null) { job.addDependency(setupJob); } if (!blankRoot) { if (job.getDependencies().size()==0 && job.getSettingBool("job.shexec", false)) { shexec(job); } else { submit(job); if (job.getJobId() == null) { abort(); log.error("Error submitting job: "+ target); throw new RunnerException("Error submitting job: "+job); } postSubmit(job, context); this.outputFilesSubmitted.addAll(target.getOutputs()); this.tempOutputFilesSubmitted.addAll(target.getTempOutputs()); for (String out: target.getOutputs()) { submittedJobs.put(Paths.get(out).toAbsolutePath().toString(), job); } } } else { log.debug("Skipping empty target: "+target); job.setJobId(""); } target.setSubmittedJobDep(job); this.submittedJobDefs.add(job); } else { log.debug("Empty job for target: "+target); } return job; } catch (ASTParseException | ASTExecException e) { abort(); throw new RunnerException(e); } } private JobDependency findJobProviding(String input) throws RunnerException { log.trace("Looking for output: "+ input); String absInput = Paths.get(input).toAbsolutePath().toString(); if (submittedJobs.containsKey(absInput)) { log.debug("Found existing job providing: "+ absInput + " ("+submittedJobs.get(absInput).getJobId()+")"); JobDependency job = submittedJobs.get(absInput); if (isJobIdValid(job.getJobId())) { return job; } log.debug("Existing job: "+ job.getJobId()+" is no longer valid... 
resubmitting"); submittedJobs.remove(absInput); } return null; } public void done() throws RunnerException { // look for a __teardown__ target and execute if found. JobDef teardown = null; // TODO: Move this lower? And add all of the job defs to the context? // (Like -- show the final outputs and temp. files...) BuildTarget tdTgt = rootContext.build("__teardown__", true); if (tdTgt!=null) { boolean teardownBlank = false; try { //System.err.println("ALL OUTPUTS : "+StringUtils.join(",",outputFilesSubmitted)); //System.err.println("TEMP-OUTPUTS: "+StringUtils.join(",",tempOutputFilesSubmitted)); MapBuilder<String, VarValue> mb = new MapBuilder<String, VarValue>(); VarString[] tmpFileVar = new VarString[tempOutputFilesSubmitted.size()]; for (int i=0; i<tempOutputFilesSubmitted.size(); i++) { tmpFileVar[i] = new VarString(tempOutputFilesSubmitted.get(i)); } try { mb.put("cgpipe.tmpfiles", new VarList(tmpFileVar)); } catch (VarTypeException e) { throw new RunnerException(e); } VarString[] tmpFileVar2 = new VarString[outputFilesSubmitted.size()]; for (int i=0; i<outputFilesSubmitted.size(); i++) { tmpFileVar2[i] = new VarString(outputFilesSubmitted.get(i)); } try { mb.put("cgpipe.outputfiles", new VarList(tmpFileVar2)); } catch (VarTypeException e) { throw new RunnerException(e); } teardown = tdTgt.eval(null, null, rootContext, mb.build()); String tmp = teardown.getBody().replaceAll("[ \\t\\r\\n]", ""); if (tmp.equals("")) { teardownBlank = true; } } catch (ASTParseException | ASTExecException e) { throw new RunnerException(e); } if (!teardownBlank) { if (teardown.getSettingBool("job.shexec", false)) { shexec(teardown); } else { teardown.addDependencies(submittedJobDefs); if (setupJob != null && setupJob.getJobId() != null) { teardown.addDependency(setupJob); } submit(teardown); if (teardown.getJobId() == null) { abort(); log.error("Error submitting job: __teardown__"); throw new RunnerException("Error submitting job: __teardown__"); } postSubmit(teardown, rootContext); 
} } } runnerDone(); if (joblog!=null) { joblog.close(); } } protected void logJob(JobDef job) { log.info("Submitted job: "+job.getJobId() +" "+ job.getName()); for (String k:job.getSettings()) { if (k.startsWith("job.")) { log.debug("setting: "+k+" => "+job.getSetting(k)); } } for (String out:job.getOutputs()) { log.debug("output: "+out); } for (String inp:job.getInputs()) { log.debug("input: "+inp); } for (String s: job.getBody().split("\n")) { log.debug("src: "+StringUtils.strip(s)); } if (!dryrun && joblog != null && job.getJobId() != null && !job.getJobId().equals("")) { JobLogRecord rec = new JobLogRecord(job.getJobId()); rec.setPipeline(CGPipe.getFilename()); rec.setWorkingDirectory(CGPipe.getWorkingDirectory()); rec.setName(job.getName()); rec.setSubmitTime(System.currentTimeMillis()); rec.setUser(System.getProperty("user.name")); for (JobDependency dep:job.getDependencies()) { if (job.getJobId()!=null && !job.getJobId().equals("")) { rec.addDep(dep.getJobId()); } } for (String out:job.getOutputs()) { rec.addOutput(out); } for (String inp:job.getInputs()) { rec.addInput(inp); } for (String s: job.getBody().split("\n")) { rec.addSrcLine(s); } for (String k:job.getSettings()) { if (k.startsWith("job.")) { if (k.equals("job.custom")) { for (String s: job.getSettings("job.custom")) { rec.addSetting(k, s); } } else { rec.addSetting(k, job.getSetting(k)); } } } joblog.writeRecord(rec); } } public void postSubmit(JobDef jobdef, RootContext context) throws RunnerException { BuildTarget postSubmitTgt = context.build("__postsubmit__", true); if (postSubmitTgt != null) { try { RootContext jobRoot = new RootContext(); for (String setting: jobdef.getSettings()) { if (setting.startsWith("job.")) { jobRoot.set(setting, jobdef.getSettingsMap().get(setting)); } } jobRoot.set("job.id", new VarString(jobdef.getJobId())); String deps = ""; for (JobDependency dep: jobdef.getDependencies()) { if (!deps.equals("")) { deps += ":"; } deps += dep.getJobId(); } 
jobRoot.set("job.depids", new VarString(deps)); JobDef postSubmit = postSubmitTgt.eval(null, null, null, jobRoot.cloneValues()); //System.err.println(postSubmit.getBody()); shexec(postSubmit); } catch (ASTParseException | ASTExecException e) { throw new RunnerException(e); } } } }
addded note about NFS
src/java/io/compgen/cgpipe/runner/JobRunner.java
addded note about NFS
<ide><path>rc/java/io/compgen/cgpipe/runner/JobRunner.java <ide> import java.io.IOException; <ide> import java.io.InputStream; <ide> import java.nio.charset.Charset; <add>import java.nio.file.AccessMode; <add>import java.nio.file.Files; <ide> import java.nio.file.Paths; <ide> import java.util.ArrayList; <ide> import java.util.HashMap; <ide> // Note: this could also be used to look for remote resources (S3, etc), but not implemented <ide> for (String allout: target.getOutputs()) { <ide> File outputFile = new File(allout); <add> <add> // TODO: This can fail for NFS mounted folders -- Add an extra check here for IOExceptions? outputFile.toPath().checkAccess()?? <ide> if (outputFile.exists()) { <ide> if (outputFile.lastModified() >= lastModified) { <ide> log.debug(" Marking output-target as skippable: "+allout);
JavaScript
mit
d3cefbc7d086c35155575f91141977ba4bdc1ddc
0
rafiqsaleh/VERCE,KNMI/VERCE,rafiqsaleh/VERCE,KNMI/VERCE,rafiqsaleh/VERCE,KNMI/VERCE,rafiqsaleh/VERCE,KNMI/VERCE,KNMI/VERCE,rafiqsaleh/VERCE
var wfStore = Ext.create('Ext.data.ArrayStore', { fields: [ {name: 'name'}, {name: 'desc'}, {name: 'status'}, {name: 'date', type: 'date', dateFormat: 'Y-m-d'}, {name: 'date2'}, {name: 'workflowId'} ], sortOnLoad: true, sorters: { property: 'date2', direction : 'DESC' }, proxy: { type: 'ajax', url: getWorkflowListURL, reader: { root: 'list' } }, autoLoad: true }); wfStore.load(function() { console.log(arguments); }); Ext.define('CF.view.WfGrid', { extend: 'Ext.grid.Panel', initComponent: function() { Ext.apply(this, { store: wfStore, id: 'wfGrid', columns: [ { text : 'Name', flex : 1, sortable : true, dataIndex: 'name' }, { text : 'Desc', flex : 1, sortable : true, dataIndex: 'desc', renderer: function(value, metaData, record, row, col, store, gridView) { if (value == null || value === '' || value === 'null') { return '-'; } return value; } }, { text : 'Status', width : 75, sortable : true, renderer : statusRenderer, dataIndex: 'status' }, { text : 'Date', width : 90, sortable : true, renderer : Ext.util.Format.dateRenderer('d - m - Y'), dataIndex: 'date' }, { xtype: 'actioncolumn', width: 55, items: [ { icon : localResourcesPath+'/img/Farm-Fresh_page_white_text.png', tooltip: 'Download logfiles', handler: function(grid, rowIndex, colIndex) { var rec = wfStore.getAt(rowIndex); window.open(downloadWorkflowOutputURL + '&workflowId=' + rec.get('workflowId'), '_self'); } }, { icon : localResourcesPath+'/img/delete-icon.png', tooltip: 'Delete instance', handler: function(grid, rowIndex, colIndex) { var rec = wfStore.getAt(rowIndex); Ext.Msg.confirm('Warning', 'Are you sure that you want to delete '+rec.get('name')+"?", function(btn) { if(btn === 'yes') { Ext.Ajax.request({ url: deleteWorkflowURL, params: { "workflowId": rec.get('workflowId') }, success: function(response){ wfStore.load(); }, failure: function(response) { Ext.Msg.alert("Error", "Delete failed!"); } }); } }); } } ] } ], flex: 1 }); this.callParent(arguments); } }); var refreshMenuControl = [ { html: 
'<strong style="color: #416DA3; position: relative; font-size: 12px; top: -1px;">Submited workflows</strong>' }, "->", { tooltip: 'Refresh list', handler: function() { wfStore.load(); }, style: { background:'none', backgroundImage: 'url('+localResourcesPath+'/img/refresh-icon.png)', backgroundSize: '90% 85%', backgroundRepeat: 'no-repeat', height: 32, width: 45, margin: 1, marginRight: '10px' }, height: 35, width: 35 }, { tooltip: 'Go to Document Library<br>(open in a new win)', height: 32, width: 32, handler: function() { openInNewTab('file-manager'); }, style: { background:'none', backgroundImage: 'url('+localResourcesPath+'/img/folder-icon.png)', backgroundSize: '90% 90%', backgroundRepeat: 'no-repeat', height: 32, width: 32, top: 0, margin: 1, marginRight: '10px' }, height: 32, width: 32 } ]; Ext.define('CF.view.Control', { extend:'Ext.form.Panel', layout: 'fit', viewConfig : { style : { overflow: 'scroll', overflowX: 'hidden' } }, dockedItems: [{ xtype: 'toolbar', dock: 'top', height: 35, items: refreshMenuControl }], items: [Ext.create('CF.view.WfGrid')] }); function statusRenderer(val) { if (val === 'INIT' || val === 'RUNNING') { return '<span style="color:green;">' + val + '</span>'; } else if (val === 'ERROR') { return '<span style="color:red;">' + val + '</span>'; } return val; } function openInNewTab(url) { var win=window.open(url, '_blank'); win.focus(); }
liferay-plugins-sdk-6.1.0/portlets/forward-modelling-portlet/docroot/js/app/view/Control.js
var wfStore = Ext.create('Ext.data.ArrayStore', { fields: [ {name: 'name'}, {name: 'desc'}, {name: 'status'}, {name: 'date', type: 'date', dateFormat: 'Y-m-d'}, {name: 'date2'}, {name: 'workflowId'} ], sortOnLoad: true, sorters: { property: 'date2', direction : 'DESC' }, proxy: { type: 'ajax', url: getWorkflowListURL, reader: { root: 'list' } }, autoLoad: true }); wfStore.load(function() { console.log(arguments); }); Ext.define('CF.view.WfGrid', { extend: 'Ext.grid.Panel', initComponent: function() { Ext.apply(this, { store: wfStore, id: 'wfGrid', columns: [ { text : 'Name', flex : 1, sortable : true, dataIndex: 'name' }, { text : 'Desc', flex : 1, sortable : true, dataIndex: 'desc' }, { text : 'Status', width : 75, sortable : true, renderer : statusRenderer, dataIndex: 'status' }, { text : 'Date', width : 90, sortable : true, renderer : Ext.util.Format.dateRenderer('d - m - Y'), dataIndex: 'date' }, { xtype: 'actioncolumn', width: 55, items: [ { icon : localResourcesPath+'/img/Farm-Fresh_page_white_text.png', tooltip: 'Download logfiles', handler: function(grid, rowIndex, colIndex) { var rec = wfStore.getAt(rowIndex); window.open(downloadWorkflowOutputURL + '&workflowId=' + rec.get('workflowId'), '_self'); } }, { icon : localResourcesPath+'/img/delete-icon.png', tooltip: 'Delete instance', handler: function(grid, rowIndex, colIndex) { var rec = wfStore.getAt(rowIndex); Ext.Msg.confirm('Warning', 'Are you sure that you want to delete '+rec.get('name')+"?", function(btn) { if(btn === 'yes') { Ext.Ajax.request({ url: deleteWorkflowURL, params: { "workflowId": rec.get('workflowId') }, success: function(response){ wfStore.load(); }, failure: function(response) { Ext.Msg.alert("Error", "Delete failed!"); } }); } }); } } ] } ], flex: 1 }); this.callParent(arguments); } }); var refreshMenuControl = [ { html: '<strong style="color: #416DA3; position: relative; font-size: 12px; top: -1px;">Submited workflows</strong>' }, "->", { tooltip: 'Refresh list', handler: function() 
{ wfStore.load(); }, style: { background:'none', backgroundImage: 'url('+localResourcesPath+'/img/refresh-icon.png)', backgroundSize: '90% 85%', backgroundRepeat: 'no-repeat', height: 32, width: 45, margin: 1, marginRight: '10px' }, height: 35, width: 35 }, { tooltip: 'Go to Document Library<br>(open in a new win)', height: 32, width: 32, handler: function() { openInNewTab('file-manager'); }, style: { background:'none', backgroundImage: 'url('+localResourcesPath+'/img/folder-icon.png)', backgroundSize: '90% 90%', backgroundRepeat: 'no-repeat', height: 32, width: 32, top: 0, margin: 1, marginRight: '10px' }, height: 32, width: 32 } ]; Ext.define('CF.view.Control', { extend:'Ext.form.Panel', layout: 'fit', viewConfig : { style : { overflow: 'scroll', overflowX: 'hidden' } }, dockedItems: [{ xtype: 'toolbar', dock: 'top', height: 35, items: refreshMenuControl }], items: [Ext.create('CF.view.WfGrid')] }); function statusRenderer(val) { if (val === 'INIT' || val === 'RUNNING') { return '<span style="color:green;">' + val + '</span>'; } else if (val === 'ERROR') { return '<span style="color:red;">' + val + '</span>'; } return val; } function openInNewTab(url) { var win=window.open(url, '_blank'); win.focus(); }
Show '-' instead of null or '' in description field in control grid git-svn-id: cbdf988d75a706adaa9266e4086cf2cab9db20b4@3154 e3b179e7-38d4-40e1-96b2-8a60d5dcc249
liferay-plugins-sdk-6.1.0/portlets/forward-modelling-portlet/docroot/js/app/view/Control.js
Show '-' instead of null or '' in description field in control grid
<ide><path>iferay-plugins-sdk-6.1.0/portlets/forward-modelling-portlet/docroot/js/app/view/Control.js <ide> text : 'Desc', <ide> flex : 1, <ide> sortable : true, <del> dataIndex: 'desc' <add> dataIndex: 'desc', <add> renderer: function(value, metaData, record, row, col, store, gridView) { <add> if (value == null || value === '' || value === 'null') { <add> return '-'; <add> } <add> <add> return value; <add> } <ide> }, <ide> { <ide> text : 'Status',
Java
apache-2.0
5c17817834fa8cfab37f467c4ab957a9cf184eae
0
zibhub/GNDMS,zibhub/GNDMS,zibhub/GNDMS,zibhub/GNDMS
package de.zib.gndms.dspace.service; /* * Copyright 2008-2011 Zuse Institute Berlin (ZIB) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import de.zib.gndms.common.dspace.service.SliceService; import de.zib.gndms.common.logic.config.Configuration; import de.zib.gndms.common.rest.*; import de.zib.gndms.gndmc.gorfx.TaskClient; import de.zib.gndms.infra.system.GNDMSystem; import de.zib.gndms.logic.model.dspace.NoSuchElementException; import de.zib.gndms.logic.model.dspace.*; import de.zib.gndms.model.dspace.Slice; import de.zib.gndms.model.dspace.SliceKind; import de.zib.gndms.model.dspace.Subspace; import de.zib.gndms.model.util.TxFrame; import de.zib.gndms.neomodel.gorfx.Taskling; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.util.FileCopyUtils; import org.springframework.web.bind.annotation.*; import org.springframework.web.client.RestTemplate; import org.springframework.web.multipart.MultipartFile; import javax.annotation.PostConstruct; import javax.inject.Inject; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import java.io.*; import java.util.*; // import de.zib.gndms.neomodel.gorfx.Taskling; /** * The sliceId service implementation. 
* * @author Ulrike Golas */ @Controller @RequestMapping(value = "/dspace") public class SliceServiceImpl implements SliceService { protected final Logger logger = LoggerFactory.getLogger(this.getClass()); private EntityManagerFactory emf; private EntityManager em; private String baseUrl; private SubspaceProvider subspaceProvider; private SliceKindProvider sliceKindProvider; private SliceProvider sliceProvider; private List< String > sliceFacetNames; private UriFactory uriFactory; private GNDMSystem system; private RestTemplate restTemplate; @Inject public void setSliceKindProvider(SliceKindProvider sliceKindProvider) { this.sliceKindProvider = sliceKindProvider; } @Inject public void setSliceProvider(SliceProvider sliceProvider) { this.sliceProvider = sliceProvider; } public void setUriFactory(UriFactory uriFactory) { this.uriFactory = uriFactory; } /** * Initialization of the sliceId service. */ @PostConstruct public final void init() { setUriFactory( new UriFactory() ); } @Override @RequestMapping( value = "/_{subspaceId}/_{sliceKindId}/_{sliceId}", method = RequestMethod.GET ) public final ResponseEntity< Facets > listSliceFacets( @PathVariable final String subspaceId, @PathVariable final String sliceKindId, @PathVariable final String sliceId, @RequestHeader( "DN" ) final String dn ) { GNDMSResponseHeader headers = setHeaders( subspaceId, sliceKindId, sliceId, dn ); try { // check for the existance of that slice findSliceOfKind( subspaceId, sliceKindId, sliceId ); return new ResponseEntity< Facets >( new Facets( listFacetsOfSlice( subspaceId, sliceKindId, sliceId ) ), headers, HttpStatus.OK ); } catch ( NoSuchElementException ne ) { logger.warn( "The sliceId " + sliceId + " of sliceId kind " + sliceKindId + "does not exist within the subspace " + subspaceId + "." 
); return new ResponseEntity< Facets >( null, headers, HttpStatus.NOT_FOUND ); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{slice}/config", method = RequestMethod.PUT) public final ResponseEntity<Void> setSliceConfiguration( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String slice, @RequestBody final Configuration config, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, slice, dn); try { Slice slic = findSliceOfKind(subspace, sliceKind, slice); SliceConfiguration slConfig = SliceConfiguration .checkSliceConfig(config); // TODO check if we handled all important sliceId parameters, // otherwise SliceConfiguration has to be extended slic.setTerminationTime(slConfig.getTerminationTime()); slic.setTotalStorageSize(slConfig.getSize()); return new ResponseEntity<Void>(null, headers, HttpStatus.OK); } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<Void>(null, headers, HttpStatus.NOT_FOUND); } catch (ClassCastException e) { logger.warn(e.getMessage()); return new ResponseEntity<Void>(null, headers, HttpStatus.BAD_REQUEST); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{slice}", method = RequestMethod.POST) public final ResponseEntity<Specifier<Void>> transformSlice( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String slice, @RequestBody final Specifier<Void> newSliceKind, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, slice, dn); try { Slice slic = findSliceOfKind(subspace, sliceKind, slice); SliceKind newSliceK = sliceKindProvider.get(subspace, newSliceKind.getUrl()); Subspace space = subspaceProvider.get(subspace); em = emf.createEntityManager(); TxFrame tx = new TxFrame(em); try { // TODO is this right? what is this uuid generator (last entry)? 
TransformSliceAction action = new TransformSliceAction( dn, slic.getTerminationTime(), newSliceK, space, slic.getTotalStorageSize(), null); action.setOwnEntityManager(em); logger.info("Calling action for transforming sliceId " + slice + "."); action.call(); tx.commit(); } finally { tx.finish(); if (em != null && em.isOpen()) { em.close(); } } Specifier<Void> spec = new Specifier<Void>(); HashMap<String, String> urimap = new HashMap<String, String>(2); urimap.put("service", "dspace"); urimap.put(UriFactory.SUBSPACE, subspace); urimap.put(UriFactory.SLICEKIND, sliceKind); urimap.put(UriFactory.SLICE, slice); spec.setUriMap(new HashMap<String, String>(urimap)); spec.setUrl(uriFactory.quoteUri(urimap)); return new ResponseEntity<Specifier<Void>>(spec, headers, HttpStatus.OK); } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<Specifier<Void>>(null, headers, HttpStatus.NOT_FOUND); } } @Override @RequestMapping( value = "/_{subspaceId}/_{sliceKindId}/_{sliceId}", method = RequestMethod.DELETE ) public final ResponseEntity<Specifier<Facets>> deleteSlice( @PathVariable final String subspaceId, @PathVariable final String sliceKindId, @PathVariable final String sliceId, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspaceId, sliceKindId, sliceId, dn); try { // submit action final Taskling ling = sliceProvider.deleteSlice( subspaceId, sliceId ); // get service facets of task final TaskClient client = new TaskClient( "" ); client.setRestTemplate( restTemplate ); final Specifier< Facets > spec = TaskClient.TaskServiceAux.getTaskSpecifier( client, ling.getId(), uriFactory, null, dn ); // return specifier for service facets return new ResponseEntity< Specifier< Facets > >( spec, headers, HttpStatus.OK ); } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<Specifier<Facets>>(null, headers, HttpStatus.NOT_FOUND); } } @Override @RequestMapping(value = 
"/_{subspace}/_{sliceKind}/_{sliceId}/files", method = RequestMethod.GET) public final ResponseEntity<List<File>> listFiles( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String sliceId, @RequestParam(value = "attr", required = false) final Map<String, String> attr, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, sliceId, dn); try { Subspace space = subspaceProvider.get(subspace); Slice slice = findSliceOfKind(subspace, sliceKind, sliceId); String path = space.getPathForSlice(slice); File dir = new File(path); if (dir.exists() && dir.canRead() && dir.isDirectory()) { File[] all = dir.listFiles(); List<File> files = new ArrayList<File>(); Collections.addAll( files, all ); return new ResponseEntity<List<File>>(files, headers, HttpStatus.OK); } else { return new ResponseEntity<List<File>>(null, headers, HttpStatus.FORBIDDEN); } } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<List<File>>(null, headers, HttpStatus.NOT_FOUND); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{slice}/files", method = RequestMethod.DELETE) public final ResponseEntity<Void> deleteFiles( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String slice, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, slice, dn); try { Subspace space = subspaceProvider.get(subspace); Slice slic = findSliceOfKind(subspace, sliceKind, slice); String path = space.getPathForSlice(slic); File dir = new File(path); if (dir.exists() && dir.canRead() && dir.isDirectory()) { File[] all = dir.listFiles(); boolean allDeleted = true; for (File file : all) { // TODO: this only works for direct files (no // subdirectories) allDeleted = allDeleted && file.delete(); } if (allDeleted) { return new ResponseEntity<Void>(null, headers, HttpStatus.OK); } else { 
logger.warn("Some file in directory " + dir + "could not be deleted."); return new ResponseEntity<Void>(null, headers, HttpStatus.CONFLICT); } } else { logger.warn("Directory " + dir + "cannot be read or is no directory."); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<Void>(null, headers, HttpStatus.NOT_FOUND); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{slice}/gsiftp", method = RequestMethod.GET) public final ResponseEntity<String> getGridFtpUrl( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String slice, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, slice, dn); try { Subspace space = subspaceProvider.get(subspace); Slice slic = findSliceOfKind(subspace, sliceKind, slice); return new ResponseEntity<String>( space.getGsiFtpPathForSlice(slic), headers, HttpStatus.OK); } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<String>(null, headers, HttpStatus.NOT_FOUND); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{sliceId}/_{fileName}", method = RequestMethod.GET) public final ResponseEntity<Void> listFileContent( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String sliceId, @PathVariable final String fileName, @RequestHeader("ATTRS") final List<String> attrs, @RequestHeader("DN") final String dn, final OutputStream out) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, sliceId, dn); try { Subspace space = subspaceProvider.get(subspace); Slice slice = findSliceOfKind(subspace, sliceKind, sliceId); String path = space.getPathForSlice(slice); File file = new File(path + File.pathSeparator + fileName); if (out == null) { final IllegalStateException illegalStateException = new IllegalStateException( 
"OutputStream not defined." ); logger.warn( illegalStateException.getMessage() ); throw illegalStateException; } if (file.exists() && file.canRead() && file.isFile()) { // TODO get requested file attributes if (attrs.contains("contents")) { FileCopyUtils.copy( new FileInputStream( file ), out ); } return new ResponseEntity<Void>(null, headers, HttpStatus.OK); } else { logger.warn("File " + file + "cannot be read or is no file."); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<Void>(null, headers, HttpStatus.NOT_FOUND); } catch (FileNotFoundException e) { logger.warn(e.getMessage()); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } catch (IOException e) { logger.warn(e.getMessage()); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{sliceId}/_{fileName}", method = RequestMethod.PUT) public final ResponseEntity<Void> setFileContent( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String sliceId, @PathVariable final String fileName, @RequestBody final MultipartFile file, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, sliceId, dn); try { Subspace space = subspaceProvider.get(subspace); Slice slice = findSliceOfKind(subspace, sliceKind, sliceId); String path = space.getPathForSlice(slice); File newFile = new File(path + File.pathSeparator + fileName); if (newFile.exists()) { logger.warn("File " + newFile + "will be overwritten. 
"); } file.transferTo( newFile ); //DataOutputStream dos = new DataOutputStream(new FileOutputStream(newFile)); //dos.write(file.getBytes()); //dos.close(); return new ResponseEntity<Void>(null, headers, HttpStatus.OK); } catch (NoSuchElementException ne) { logger.warn(ne.getMessage(), ne); return new ResponseEntity<Void>(null, headers, HttpStatus.NOT_FOUND); } catch (FileNotFoundException e) { logger.warn(e.getMessage(), e); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } catch (IOException e) { logger.warn(e.getMessage(), e); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{slice}/_{fileName}", method = RequestMethod.DELETE) public final ResponseEntity<Void> deleteFile( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String slice, @PathVariable final String fileName, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, slice, dn); try { Subspace space = subspaceProvider.get(subspace); Slice slic = findSliceOfKind(subspace, sliceKind, slice); String path = space.getPathForSlice(slic); File file = new File(path + File.pathSeparator + fileName); if (file.exists() && file.canWrite() && file.isFile()) { if (file.delete()) { return new ResponseEntity<Void>(null, headers, HttpStatus.OK); } else { logger.warn("File " + file + "cannot be deleted."); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } } else { logger.warn("File " + file + "cannot be written or is no file."); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } } catch (NoSuchElementException ne) { logger.warn(ne.getMessage(), ne); return new ResponseEntity<Void>(null, headers, HttpStatus.NOT_FOUND); } } private List< Facet > listFacetsOfSlice( String subspaceId, String sliceKindId, String sliceId ) { Map< String, String > vars = new HashMap< String, String >( ); 
vars.put( "service", "dspace" ); vars.put( "subspace", subspaceId ); vars.put( "sliceKind", sliceKindId ); vars.put( "sliceId", sliceId ); List< Facet > facets = new LinkedList< Facet >( ); for( String facetName: sliceFacetNames ) { Facet facet = new Facet( facetName, uriFactory.sliceUri( vars, facetName ) ); facets.add( facet ); } return facets; } /** * Sets the GNDMS response header for a given subspace, sliceId kind, sliceId * and dn using the base URL. * * @param subspace * The subspace id. * @param sliceKind * The sliceId kind id. * @param slice * The sliceId id. * @param dn * The dn. * @return The response header for this subspace. */ private GNDMSResponseHeader setHeaders(final String subspace, final String sliceKind, final String slice, final String dn) { GNDMSResponseHeader headers = new GNDMSResponseHeader(); headers.setResourceURL(baseUrl + "/dspace/_" + subspace + "/_" + sliceKind + "/_" + slice); headers.setParentURL(baseUrl + "/dspace/_" + subspace + "/_" + sliceKind); if (dn != null) { headers.setDN(dn); } return headers; } /** * Returns a specific sliceId of a given sliceId kind id, if it exists in the * subspace. * * @param subspaceId * The subspace id. * @param sliceKindId * The sliceId kind id. * @param sliceId * The sliceId id. * @return The sliceId. * @throws NoSuchElementException * If no such sliceId exists. */ private Slice findSliceOfKind( final String subspaceId, final String sliceKindId, final String sliceId ) throws NoSuchElementException { Slice slice = sliceProvider.getSlice( subspaceId, sliceId ); SliceKind sliceK = sliceKindProvider.get( subspaceId, sliceKindId ); if( !slice.getKind().equals( sliceK ) ) { logger.error( "Slice " + sliceId + " is of sliceKind " + slice.getKind().getId() + " instead of " + sliceKindId ); throw new NoSuchElementException(); } return slice; } /** * Returns the base url of this sliceId service. 
* * @return the baseUrl */ public final String getBaseUrl() { return baseUrl; } /** * Sets the base url of this sliceId service. * * @param baseUrl * the baseUrl to set */ public final void setBaseUrl(final String baseUrl) { this.baseUrl = baseUrl; } /** * Returns the facets of this sliceId service. * * @return the sliceFacets */ public final List< String > getSliceFacetNames() { return sliceFacetNames; } /** * Sets the facets of this sliceId service. * * @param sliceFacetNames * the sliceFacets to set */ public final void setSliceFacetNames(final List< String > sliceFacetNames ) { this.sliceFacetNames = sliceFacetNames; } @Inject public final void setSubspaceProvider( SubspaceProvider subspaceProvider ) { this.subspaceProvider = subspaceProvider; } public GNDMSystem getSystem() { return system; } @Inject public void setSystem( GNDMSystem system ) { this.system = system; } @Inject public void setRestTemplate( RestTemplate restTemplate ) { this.restTemplate = restTemplate; } }
dspace/src/de/zib/gndms/dspace/service/SliceServiceImpl.java
package de.zib.gndms.dspace.service; /* * Copyright 2008-2011 Zuse Institute Berlin (ZIB) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import de.zib.gndms.common.dspace.service.SliceService; import de.zib.gndms.common.logic.config.Configuration; import de.zib.gndms.common.rest.Facets; import de.zib.gndms.common.rest.GNDMSResponseHeader; import de.zib.gndms.common.rest.Specifier; import de.zib.gndms.common.rest.UriFactory; import de.zib.gndms.gndmc.gorfx.TaskClient; import de.zib.gndms.infra.system.GNDMSystem; import de.zib.gndms.logic.model.dspace.NoSuchElementException; import de.zib.gndms.logic.model.dspace.*; import de.zib.gndms.model.dspace.Slice; import de.zib.gndms.model.dspace.SliceKind; import de.zib.gndms.model.dspace.Subspace; import de.zib.gndms.model.util.TxFrame; import de.zib.gndms.neomodel.gorfx.Taskling; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.util.FileCopyUtils; import org.springframework.web.bind.annotation.*; import org.springframework.web.client.RestTemplate; import org.springframework.web.multipart.MultipartFile; import javax.annotation.PostConstruct; import javax.inject.Inject; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import java.io.*; import java.util.*; // import de.zib.gndms.neomodel.gorfx.Taskling; /** * The sliceId 
service implementation. * * @author Ulrike Golas */ @Controller @RequestMapping(value = "/dspace") public class SliceServiceImpl implements SliceService { protected final Logger logger = LoggerFactory.getLogger(this.getClass()); private EntityManagerFactory emf; private EntityManager em; private String baseUrl; private SubspaceProvider subspaceProvider; private SliceKindProvider sliceKindProvider; private SliceProvider sliceProvider; private Facets sliceFacets; private UriFactory uriFactory; private GNDMSystem system; private RestTemplate restTemplate; @Inject public void setSliceKindProvider(SliceKindProvider sliceKindProvider) { this.sliceKindProvider = sliceKindProvider; } @Inject public void setSliceProvider(SliceProvider sliceProvider) { this.sliceProvider = sliceProvider; } public void setUriFactory(UriFactory uriFactory) { this.uriFactory = uriFactory; } /** * Initialization of the sliceId service. */ @PostConstruct public final void init() { setUriFactory( new UriFactory() ); } @Override @RequestMapping( value = "/_{subspaceId}/_{sliceKindId}/_{sliceId}", method = RequestMethod.GET ) public final ResponseEntity< Facets > listSliceFacets( @PathVariable final String subspaceId, @PathVariable final String sliceKindId, @PathVariable final String sliceId, @RequestHeader( "DN" ) final String dn ) { GNDMSResponseHeader headers = setHeaders( subspaceId, sliceKindId, sliceId, dn ); try { Slice slice = findSliceOfKind( subspaceId, sliceKindId, sliceId ); return new ResponseEntity< Facets >( sliceFacets, headers, HttpStatus.OK ); } catch ( NoSuchElementException ne ) { logger.warn( "The sliceId " + sliceId + " of sliceId kind " + sliceKindId + "does not exist within the subspace " + subspaceId + "." 
); return new ResponseEntity< Facets >( null, headers, HttpStatus.NOT_FOUND ); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{slice}/config", method = RequestMethod.PUT) public final ResponseEntity<Void> setSliceConfiguration( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String slice, @RequestBody final Configuration config, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, slice, dn); try { Slice slic = findSliceOfKind(subspace, sliceKind, slice); SliceConfiguration slConfig = SliceConfiguration .checkSliceConfig(config); // TODO check if we handled all important sliceId parameters, // otherwise SliceConfiguration has to be extended slic.setTerminationTime(slConfig.getTerminationTime()); slic.setTotalStorageSize(slConfig.getSize()); return new ResponseEntity<Void>(null, headers, HttpStatus.OK); } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<Void>(null, headers, HttpStatus.NOT_FOUND); } catch (ClassCastException e) { logger.warn(e.getMessage()); return new ResponseEntity<Void>(null, headers, HttpStatus.BAD_REQUEST); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{slice}", method = RequestMethod.POST) public final ResponseEntity<Specifier<Void>> transformSlice( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String slice, @RequestBody final Specifier<Void> newSliceKind, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, slice, dn); try { Slice slic = findSliceOfKind(subspace, sliceKind, slice); SliceKind newSliceK = sliceKindProvider.get(subspace, newSliceKind.getUrl()); Subspace space = subspaceProvider.get(subspace); em = emf.createEntityManager(); TxFrame tx = new TxFrame(em); try { // TODO is this right? what is this uuid generator (last entry)? 
TransformSliceAction action = new TransformSliceAction( dn, slic.getTerminationTime(), newSliceK, space, slic.getTotalStorageSize(), null); action.setOwnEntityManager(em); logger.info("Calling action for transforming sliceId " + slice + "."); action.call(); tx.commit(); } finally { tx.finish(); if (em != null && em.isOpen()) { em.close(); } } Specifier<Void> spec = new Specifier<Void>(); HashMap<String, String> urimap = new HashMap<String, String>(2); urimap.put("service", "dspace"); urimap.put(UriFactory.SUBSPACE, subspace); urimap.put(UriFactory.SLICEKIND, sliceKind); urimap.put(UriFactory.SLICE, slice); spec.setUriMap(new HashMap<String, String>(urimap)); spec.setUrl(uriFactory.quoteUri(urimap)); return new ResponseEntity<Specifier<Void>>(spec, headers, HttpStatus.OK); } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<Specifier<Void>>(null, headers, HttpStatus.NOT_FOUND); } } @Override @RequestMapping( value = "/_{subspaceId}/_{sliceKindId}/_{sliceId}", method = RequestMethod.DELETE ) public final ResponseEntity<Specifier<Facets>> deleteSlice( @PathVariable final String subspaceId, @PathVariable final String sliceKindId, @PathVariable final String sliceId, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspaceId, sliceKindId, sliceId, dn); try { // submit action final Taskling ling = sliceProvider.deleteSlice( subspaceId, sliceId ); // get service facets of task final TaskClient client = new TaskClient( "" ); client.setRestTemplate( restTemplate ); final Specifier< Facets > spec = TaskClient.TaskServiceAux.getTaskSpecifier( client, ling.getId(), uriFactory, null, dn ); // return specifier for service facets return new ResponseEntity< Specifier< Facets > >( spec, headers, HttpStatus.OK ); } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<Specifier<Facets>>(null, headers, HttpStatus.NOT_FOUND); } } @Override @RequestMapping(value = 
"/_{subspace}/_{sliceKind}/_{sliceId}/files", method = RequestMethod.GET) public final ResponseEntity<List<File>> listFiles( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String sliceId, @RequestParam(value = "attr", required = false) final Map<String, String> attr, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, sliceId, dn); try { Subspace space = subspaceProvider.get(subspace); Slice slice = findSliceOfKind(subspace, sliceKind, sliceId); String path = space.getPathForSlice(slice); File dir = new File(path); if (dir.exists() && dir.canRead() && dir.isDirectory()) { File[] all = dir.listFiles(); List<File> files = new ArrayList<File>(); Collections.addAll( files, all ); return new ResponseEntity<List<File>>(files, headers, HttpStatus.OK); } else { return new ResponseEntity<List<File>>(null, headers, HttpStatus.FORBIDDEN); } } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<List<File>>(null, headers, HttpStatus.NOT_FOUND); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{slice}/files", method = RequestMethod.DELETE) public final ResponseEntity<Void> deleteFiles( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String slice, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, slice, dn); try { Subspace space = subspaceProvider.get(subspace); Slice slic = findSliceOfKind(subspace, sliceKind, slice); String path = space.getPathForSlice(slic); File dir = new File(path); if (dir.exists() && dir.canRead() && dir.isDirectory()) { File[] all = dir.listFiles(); boolean allDeleted = true; for (File file : all) { // TODO: this only works for direct files (no // subdirectories) allDeleted = allDeleted && file.delete(); } if (allDeleted) { return new ResponseEntity<Void>(null, headers, HttpStatus.OK); } else { 
logger.warn("Some file in directory " + dir + "could not be deleted."); return new ResponseEntity<Void>(null, headers, HttpStatus.CONFLICT); } } else { logger.warn("Directory " + dir + "cannot be read or is no directory."); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<Void>(null, headers, HttpStatus.NOT_FOUND); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{slice}/gsiftp", method = RequestMethod.GET) public final ResponseEntity<String> getGridFtpUrl( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String slice, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, slice, dn); try { Subspace space = subspaceProvider.get(subspace); Slice slic = findSliceOfKind(subspace, sliceKind, slice); return new ResponseEntity<String>( space.getGsiFtpPathForSlice(slic), headers, HttpStatus.OK); } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<String>(null, headers, HttpStatus.NOT_FOUND); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{sliceId}/_{fileName}", method = RequestMethod.GET) public final ResponseEntity<Void> listFileContent( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String sliceId, @PathVariable final String fileName, @RequestHeader("ATTRS") final List<String> attrs, @RequestHeader("DN") final String dn, final OutputStream out) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, sliceId, dn); try { Subspace space = subspaceProvider.get(subspace); Slice slice = findSliceOfKind(subspace, sliceKind, sliceId); String path = space.getPathForSlice(slice); File file = new File(path + File.pathSeparator + fileName); if (out == null) { final IllegalStateException illegalStateException = new IllegalStateException( 
"OutputStream not defined." ); logger.warn( illegalStateException.getMessage() ); throw illegalStateException; } if (file.exists() && file.canRead() && file.isFile()) { // TODO get requested file attributes if (attrs.contains("contents")) { FileCopyUtils.copy( new FileInputStream( file ), out ); } return new ResponseEntity<Void>(null, headers, HttpStatus.OK); } else { logger.warn("File " + file + "cannot be read or is no file."); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } } catch (NoSuchElementException ne) { logger.warn(ne.getMessage()); return new ResponseEntity<Void>(null, headers, HttpStatus.NOT_FOUND); } catch (FileNotFoundException e) { logger.warn(e.getMessage()); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } catch (IOException e) { logger.warn(e.getMessage()); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{sliceId}/_{fileName}", method = RequestMethod.PUT) public final ResponseEntity<Void> setFileContent( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String sliceId, @PathVariable final String fileName, @RequestBody final MultipartFile file, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, sliceId, dn); try { Subspace space = subspaceProvider.get(subspace); Slice slice = findSliceOfKind(subspace, sliceKind, sliceId); String path = space.getPathForSlice(slice); File newFile = new File(path + File.pathSeparator + fileName); if (newFile.exists()) { logger.warn("File " + newFile + "will be overwritten. 
"); } file.transferTo( newFile ); //DataOutputStream dos = new DataOutputStream(new FileOutputStream(newFile)); //dos.write(file.getBytes()); //dos.close(); return new ResponseEntity<Void>(null, headers, HttpStatus.OK); } catch (NoSuchElementException ne) { logger.warn(ne.getMessage(), ne); return new ResponseEntity<Void>(null, headers, HttpStatus.NOT_FOUND); } catch (FileNotFoundException e) { logger.warn(e.getMessage(), e); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } catch (IOException e) { logger.warn(e.getMessage(), e); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } } @Override @RequestMapping(value = "/_{subspace}/_{sliceKind}/_{slice}/_{fileName}", method = RequestMethod.DELETE) public final ResponseEntity<Void> deleteFile( @PathVariable final String subspace, @PathVariable final String sliceKind, @PathVariable final String slice, @PathVariable final String fileName, @RequestHeader("DN") final String dn) { GNDMSResponseHeader headers = setHeaders(subspace, sliceKind, slice, dn); try { Subspace space = subspaceProvider.get(subspace); Slice slic = findSliceOfKind(subspace, sliceKind, slice); String path = space.getPathForSlice(slic); File file = new File(path + File.pathSeparator + fileName); if (file.exists() && file.canWrite() && file.isFile()) { if (file.delete()) { return new ResponseEntity<Void>(null, headers, HttpStatus.OK); } else { logger.warn("File " + file + "cannot be deleted."); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } } else { logger.warn("File " + file + "cannot be written or is no file."); return new ResponseEntity<Void>(null, headers, HttpStatus.FORBIDDEN); } } catch (NoSuchElementException ne) { logger.warn(ne.getMessage(), ne); return new ResponseEntity<Void>(null, headers, HttpStatus.NOT_FOUND); } } /** * Sets the GNDMS response header for a given subspace, sliceId kind, sliceId * and dn using the base URL. * * @param subspace * The subspace id. 
* @param sliceKind * The sliceId kind id. * @param slice * The sliceId id. * @param dn * The dn. * @return The response header for this subspace. */ private GNDMSResponseHeader setHeaders(final String subspace, final String sliceKind, final String slice, final String dn) { GNDMSResponseHeader headers = new GNDMSResponseHeader(); headers.setResourceURL(baseUrl + "/dspace/_" + subspace + "/_" + sliceKind + "/_" + slice); headers.setParentURL(baseUrl + "/dspace/_" + subspace + "/_" + sliceKind); if (dn != null) { headers.setDN(dn); } return headers; } /** * Returns a specific sliceId of a given sliceId kind id, if it exists in the * subspace. * * @param subspaceId * The subspace id. * @param sliceKindId * The sliceId kind id. * @param sliceId * The sliceId id. * @return The sliceId. * @throws NoSuchElementException * If no such sliceId exists. */ private Slice findSliceOfKind( final String subspaceId, final String sliceKindId, final String sliceId ) throws NoSuchElementException { Slice slice = sliceProvider.getSlice( subspaceId, sliceId ); SliceKind sliceK = sliceKindProvider.get( subspaceId, sliceKindId ); if( !slice.getKind().equals( sliceK ) ) { logger.error( "Slice " + sliceId + " is of sliceKind " + slice.getKind().getId() + " instead of " + sliceKindId ); throw new NoSuchElementException(); } return slice; } /** * Returns the base url of this sliceId service. * * @return the baseUrl */ public final String getBaseUrl() { return baseUrl; } /** * Sets the base url of this sliceId service. * * @param baseUrl * the baseUrl to set */ public final void setBaseUrl(final String baseUrl) { this.baseUrl = baseUrl; } /** * Returns the facets of this sliceId service. * * @return the sliceFacets */ public final Facets getSliceFacets() { return sliceFacets; } /** * Sets the facets of this sliceId service. 
* * @param sliceFacets * the sliceFacets to set */ public final void setSliceFacets(final Facets sliceFacets) { this.sliceFacets = sliceFacets; } @Inject public final void setSubspaceProvider( SubspaceProvider subspaceProvider ) { this.subspaceProvider = subspaceProvider; } public GNDMSystem getSystem() { return system; } @Inject public void setSystem( GNDMSystem system ) { this.system = system; } @Inject public void setRestTemplate( RestTemplate restTemplate ) { this.restTemplate = restTemplate; } }
List facets of slice
dspace/src/de/zib/gndms/dspace/service/SliceServiceImpl.java
List facets of slice
<ide><path>space/src/de/zib/gndms/dspace/service/SliceServiceImpl.java <ide> <ide> import de.zib.gndms.common.dspace.service.SliceService; <ide> import de.zib.gndms.common.logic.config.Configuration; <del>import de.zib.gndms.common.rest.Facets; <del>import de.zib.gndms.common.rest.GNDMSResponseHeader; <del>import de.zib.gndms.common.rest.Specifier; <del>import de.zib.gndms.common.rest.UriFactory; <add>import de.zib.gndms.common.rest.*; <ide> import de.zib.gndms.gndmc.gorfx.TaskClient; <ide> import de.zib.gndms.infra.system.GNDMSystem; <ide> import de.zib.gndms.logic.model.dspace.NoSuchElementException; <ide> private SubspaceProvider subspaceProvider; <ide> private SliceKindProvider sliceKindProvider; <ide> private SliceProvider sliceProvider; <del> private Facets sliceFacets; <add> private List< String > sliceFacetNames; <ide> private UriFactory uriFactory; <ide> <ide> private GNDMSystem system; <ide> GNDMSResponseHeader headers = setHeaders( subspaceId, sliceKindId, sliceId, dn ); <ide> <ide> try { <del> Slice slice = findSliceOfKind( subspaceId, sliceKindId, sliceId ); <del> return new ResponseEntity< Facets >( sliceFacets, headers, HttpStatus.OK ); <add> // check for the existance of that slice <add> findSliceOfKind( subspaceId, sliceKindId, sliceId ); <add> <add> return new ResponseEntity< Facets >( new Facets( listFacetsOfSlice( subspaceId, sliceKindId, sliceId ) ), headers, HttpStatus.OK ); <ide> } catch ( NoSuchElementException ne ) { <ide> logger.warn( "The sliceId " + sliceId + " of sliceId kind " + sliceKindId <ide> + "does not exist within the subspace " + subspaceId + "." 
); <ide> } <ide> } <ide> <add> private List< Facet > listFacetsOfSlice( String subspaceId, String sliceKindId, String sliceId ) { <add> Map< String, String > vars = new HashMap< String, String >( ); <add> vars.put( "service", "dspace" ); <add> vars.put( "subspace", subspaceId ); <add> vars.put( "sliceKind", sliceKindId ); <add> vars.put( "sliceId", sliceId ); <add> <add> List< Facet > facets = new LinkedList< Facet >( ); <add> <add> for( String facetName: sliceFacetNames ) { <add> Facet facet = new Facet( facetName, uriFactory.sliceUri( vars, facetName ) ); <add> facets.add( facet ); <add> } <add> return facets; <add> } <add> <ide> /** <ide> * Sets the GNDMS response header for a given subspace, sliceId kind, sliceId <ide> * and dn using the base URL. <ide> * <ide> * @return the sliceFacets <ide> */ <del> public final Facets getSliceFacets() { <del> return sliceFacets; <add> public final List< String > getSliceFacetNames() { <add> return sliceFacetNames; <ide> } <ide> <ide> /** <ide> * Sets the facets of this sliceId service. <ide> * <del> * @param sliceFacets <add> * @param sliceFacetNames <ide> * the sliceFacets to set <ide> */ <del> public final void setSliceFacets(final Facets sliceFacets) { <del> this.sliceFacets = sliceFacets; <add> public final void setSliceFacetNames(final List< String > sliceFacetNames ) { <add> this.sliceFacetNames = sliceFacetNames; <ide> } <ide> <ide> @Inject
Java
apache-2.0
27efb234c336da7b507de93900d11b9b1bfd363e
0
ecanzonieri/pyleus,poros/pyleus,imcom/pyleus,Yelp/pyleus,stallman-cui/pyleus,stallman-cui/pyleus,poros/pyleus,dapuck/pyleus,jirafe/pyleus,dapuck/pyleus,Yelp/pyleus,mzbyszynski/pyleus,mzbyszynski/pyleus,patricklucas/pyleus,ecanzonieri/pyleus,imcom/pyleus,jirafe/pyleus,imcom/pyleus,patricklucas/pyleus
package com.yelp.pyleus.serializer; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.DataOutputStream; import java.util.List; import java.util.Map; import java.util.HashMap; import backtype.storm.multilang.BoltMsg; import backtype.storm.multilang.ISerializer; import backtype.storm.multilang.NoOutputException; import backtype.storm.multilang.ShellMsg; import backtype.storm.multilang.SpoutMsg; import backtype.storm.task.TopologyContext; import backtype.storm.utils.Utils; import org.apache.log4j.Logger; import org.msgpack.MessagePack; import org.msgpack.template.Template; import static org.msgpack.template.Templates.tMap; import static org.msgpack.template.Templates.TString; import static org.msgpack.template.Templates.TValue; import static org.msgpack.template.Templates.tList; import org.msgpack.type.Value; public class MessagePackSerializer implements ISerializer { public static Logger LOG = Logger.getLogger(MessagePackSerializer.class); private DataOutputStream processIn; private InputStream processOut; private MessagePack msgPack; private Template<Map<String,Value>> mapTmpl; private Template<List<Value>> listTmpl; @Override public void initialize(OutputStream processIn, InputStream processOut) { this.processIn = new DataOutputStream(processIn); this.processOut = processOut; this.msgPack = new MessagePack(); this.mapTmpl = tMap(TString, TValue); this.listTmpl = tList(TValue); } private Map<String, Object> getMapFromContext(TopologyContext context) { Map context_map = new HashMap(); context_map.put("taskid", context.getThisTaskId()); context_map.put("task->component", context.getTaskToComponent()); return context_map; } @Override public Number connect(Map conf, TopologyContext context) throws IOException, NoOutputException { // Create the setup message for the initial handshake Map<String, Object> setupmsg = new HashMap<String, Object>(); setupmsg.put("conf", conf); setupmsg.put("pidDir", context.getPIDDir()); 
setupmsg.put("context", getMapFromContext(context)); // Write the message to the pipe writeMessage(setupmsg); Map<String, Value> pidmsg = readMessage(); Value pid = pidmsg.get("pid"); return (Number) pid.asIntegerValue().getInt(); } @Override public ShellMsg readShellMsg() throws IOException, NoOutputException { Map<String, Value> msg = readMessage(); ShellMsg shellMsg = new ShellMsg(); String command = msg.get("command").asRawValue().getString(); shellMsg.setCommand(command); Object id = null; Value valueId = msg.get("id"); /* Since spouts can use both numbers and strings as ids, while bolts * only use strings, the check during acking was failing. Turning * everything into strings solves the problem. The issue does not * exist with JSON, instead.*/ if (valueId != null) { if (valueId.isIntegerValue()) { id = msg.get("id").asIntegerValue().toString(); } else { id = valueId.asRawValue().getString(); } } shellMsg.setId(id); Value log = msg.get("msg"); if(log != null) { shellMsg.setMsg(log.asRawValue().getString()); } String stream = Utils.DEFAULT_STREAM_ID; Value streamValue = msg.get("stream"); if (streamValue != null) { stream = streamValue.asRawValue().getString(); } shellMsg.setStream(stream); Value taskValue = msg.get("task"); if (taskValue != null) { shellMsg.setTask(taskValue.asIntegerValue().getLong()); } else { shellMsg.setTask(0); } Value need_task_ids = msg.get("need_task_ids"); if (need_task_ids == null || (need_task_ids).asBooleanValue().getBoolean()) { shellMsg.setNeedTaskIds(true); } else { shellMsg.setNeedTaskIds(false); } Value tupleValue = msg.get("tuple"); if (tupleValue != null) { for (Value element:tupleValue.asArrayValue()) { /* Tuples need to be Kryo serializable, while some msgpack-java type * are not. Registering a Kryo serializer for them is not trivial at all, * given how this package works. Problematic types are ByteArray, String, * Map and List. This change is needed for ByteArrays and Strings. 
Nested * Lists and Maps are not supported.*/ shellMsg.addTuple(this.convertMsgpackType(element)); } } else { } Value anchorsValue = msg.get("anchors"); if(anchorsValue != null) { for (Value v: anchorsValue.asArrayValue()) { shellMsg.addAnchor(v.asRawValue().getString()); } } return shellMsg; } private Object convertMsgpackType(Value element) { if (element.isRawValue()) { return element.asRawValue().getString(); } else if (element.isBooleanValue()) { return element.asBooleanValue().getBoolean(); } else if (element.isFloatValue()) { return element.asFloatValue().getFloat(); } else if (element.isIntegerValue()) { return element.asIntegerValue().getInt(); } else if (element.isNilValue()) { return null; } else { return element; } } @Override public void writeBoltMsg(BoltMsg boltMsg) throws IOException { Map<String, Object> map = new HashMap<String, Object>(); map.put("id", boltMsg.getId()); map.put("comp", boltMsg.getComp()); map.put("stream", boltMsg.getStream()); map.put("task", boltMsg.getTask()); map.put("tuple", boltMsg.getTuple()); writeMessage(map); } @Override public void writeSpoutMsg(SpoutMsg spoutMsg) throws IOException { Map<String, Object> map = new HashMap<String, Object>(); map.put("command", spoutMsg.getCommand()); map.put("id", spoutMsg.getId()); writeMessage(map); } @Override public void writeTaskIds(List<Integer> taskIds) throws IOException { writeMessage(taskIds); } private Map<String, Value> readMessage() throws IOException { return msgPack.read(this.processOut, this.mapTmpl); } private void writeMessage(List<Integer> msg) throws IOException{ msgPack.write(this.processIn, msg); this.processIn.flush(); } private void writeMessage(Map<String, Object> msg) throws IOException { msgPack.write(this.processIn, msg); this.processIn.flush(); } }
topology_builder/src/main/java/com/yelp/pyleus/serializer/MessagePackSerializer.java
package com.yelp.pyleus.serializer; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.DataOutputStream; import java.util.List; import java.util.Map; import java.util.HashMap; import backtype.storm.multilang.BoltMsg; import backtype.storm.multilang.ISerializer; import backtype.storm.multilang.NoOutputException; import backtype.storm.multilang.ShellMsg; import backtype.storm.multilang.SpoutMsg; import backtype.storm.task.TopologyContext; import backtype.storm.utils.Utils; import org.apache.log4j.Logger; import org.msgpack.MessagePack; import org.msgpack.template.Template; import static org.msgpack.template.Templates.tMap; import static org.msgpack.template.Templates.TString; import static org.msgpack.template.Templates.TValue; import static org.msgpack.template.Templates.tList; import org.msgpack.type.Value; public class MessagePackSerializer implements ISerializer { public static Logger LOG = Logger.getLogger(MessagePackSerializer.class); private DataOutputStream processIn; private InputStream processOut; private MessagePack msgPack; private Template<Map<String,Value>> mapTmpl; private Template<List<Value>> listTmpl; @Override public void initialize(OutputStream processIn, InputStream processOut) { this.processIn = new DataOutputStream(processIn); this.processOut = processOut; this.msgPack = new MessagePack(); this.mapTmpl = tMap(TString, TValue); this.listTmpl = tList(TValue); } private Map<String, Object> getMapFromContext(TopologyContext context) { Map context_map = new HashMap(); context_map.put("taskid", context.getThisTaskId()); context_map.put("task->component", context.getTaskToComponent()); return context_map; } @Override public Number connect(Map conf, TopologyContext context) throws IOException, NoOutputException { // Create the setup message for the initial handshake Map<String, Object> setupmsg = new HashMap<String, Object>(); setupmsg.put("conf", conf); setupmsg.put("pidDir", context.getPIDDir()); 
setupmsg.put("context", getMapFromContext(context)); // Write the message to the pipe writeMessage(setupmsg); Map<String, Value> pidmsg = readMessage(); Value pid = pidmsg.get("pid"); return (Number) pid.asIntegerValue().getInt(); } @Override public ShellMsg readShellMsg() throws IOException, NoOutputException { Map<String, Value> msg = readMessage(); ShellMsg shellMsg = new ShellMsg(); String command = msg.get("command").asRawValue().getString(); shellMsg.setCommand(command); Object id = null; Value valueId = msg.get("id"); /* Since spouts can use both numbers and strings as ids, while bolts * only use strings, the check during acking was failing. Turning * everything into strings solves the problem. The issue does not * exist with JSON, instead.*/ if (valueId != null) { if (valueId.isIntegerValue()) { id = msg.get("id").asIntegerValue().toString(); } else { id = valueId.asRawValue().getString(); } } shellMsg.setId(id); Value log = msg.get("msg"); if(log != null) { shellMsg.setMsg(log.asRawValue().getString()); } String stream = Utils.DEFAULT_STREAM_ID; Value streamValue = msg.get("stream"); if (streamValue != null) { stream = streamValue.asRawValue().getString(); } shellMsg.setStream(stream); Value taskValue = msg.get("task"); if (taskValue != null) { shellMsg.setTask(taskValue.asIntegerValue().getLong()); } else { shellMsg.setTask(0); } Value need_task_ids = msg.get("need_task_ids"); if (need_task_ids == null || (need_task_ids).asBooleanValue().getBoolean()) { shellMsg.setNeedTaskIds(true); } else { shellMsg.setNeedTaskIds(false); } Value tupleValue = msg.get("tuple"); if (tupleValue != null) { for (Value element:tupleValue.asArrayValue()) { /* Tuples need to be Kryo serializable, while some msgpack-java type * are not. Registering a Kryo serializer for them is not trivial at all, * given how this package works. Problematic types are ByteArray, String, * Map and List. This change is needed for ByteArrays and Strings. 
Nested * Lists and Maps are not supported.*/ Object elementObject = element; if (element.isRawValue()) { elementObject = element.asRawValue().getString(); } shellMsg.addTuple(elementObject); } } Value anchorsValue = msg.get("anchors"); if(anchorsValue != null) { for (Value v: anchorsValue.asArrayValue()) { shellMsg.addAnchor(v.asRawValue().getString()); } } return shellMsg; } @Override public void writeBoltMsg(BoltMsg boltMsg) throws IOException { Map<String, Object> map = new HashMap<String, Object>(); map.put("id", boltMsg.getId()); map.put("comp", boltMsg.getComp()); map.put("stream", boltMsg.getStream()); map.put("task", boltMsg.getTask()); map.put("tuple", boltMsg.getTuple()); writeMessage(map); } @Override public void writeSpoutMsg(SpoutMsg spoutMsg) throws IOException { Map<String, Object> map = new HashMap<String, Object>(); map.put("command", spoutMsg.getCommand()); map.put("id", spoutMsg.getId()); writeMessage(map); } @Override public void writeTaskIds(List<Integer> taskIds) throws IOException { writeMessage(taskIds); } private Map<String, Value> readMessage() throws IOException { return msgPack.read(this.processOut, this.mapTmpl); } private void writeMessage(List<Integer> msg) throws IOException{ msgPack.write(this.processIn, msg); this.processIn.flush(); } private void writeMessage(Map<String, Object> msg) throws IOException { msgPack.write(this.processIn, msg); this.processIn.flush(); } }
Convert msgpack Value to the proper java type
topology_builder/src/main/java/com/yelp/pyleus/serializer/MessagePackSerializer.java
Convert msgpack Value to the proper java type
<ide><path>opology_builder/src/main/java/com/yelp/pyleus/serializer/MessagePackSerializer.java <ide> * given how this package works. Problematic types are ByteArray, String, <ide> * Map and List. This change is needed for ByteArrays and Strings. Nested <ide> * Lists and Maps are not supported.*/ <del> Object elementObject = element; <del> if (element.isRawValue()) { <del> elementObject = element.asRawValue().getString(); <del> } <del> shellMsg.addTuple(elementObject); <add> shellMsg.addTuple(this.convertMsgpackType(element)); <ide> } <add> } else { <ide> } <ide> <ide> Value anchorsValue = msg.get("anchors"); <ide> } <ide> } <ide> return shellMsg; <add> } <add> <add> private Object convertMsgpackType(Value element) { <add> if (element.isRawValue()) { <add> return element.asRawValue().getString(); <add> } else if (element.isBooleanValue()) { <add> return element.asBooleanValue().getBoolean(); <add> } else if (element.isFloatValue()) { <add> return element.asFloatValue().getFloat(); <add> } else if (element.isIntegerValue()) { <add> return element.asIntegerValue().getInt(); <add> } else if (element.isNilValue()) { <add> return null; <add> } else { <add> return element; <add> } <ide> } <ide> <ide> @Override
Java
mit
caf89b23bb6b4b7c60e8daae56022bd54576e5af
0
chiranjith/cucumber-jvm,sventorben/cucumber-jvm,bartkeizer/cucumber-jvm,rlagunov-anaplan/cucumber-jvm,bartkeizer/cucumber-jvm,demos74dx/cucumber-jvm,ushkinaz/cucumber-jvm,dkowis/cucumber-jvm,NickCharsley/cucumber-jvm,HendrikSP/cucumber-jvm,DPUkyle/cucumber-jvm,dkowis/cucumber-jvm,sventorben/cucumber-jvm,HendrikSP/cucumber-jvm,hcawebdevelopment/cucumber-jvm,danielwegener/cucumber-jvm,andyb-ge/cucumber-jvm,dkowis/cucumber-jvm,ppotanin/cucumber-jvm,paoloambrosio/cucumber-jvm,ppotanin/cucumber-jvm,sventorben/cucumber-jvm,dkowis/cucumber-jvm,NickCharsley/cucumber-jvm,DPUkyle/cucumber-jvm,joansmith/cucumber-jvm,bartkeizer/cucumber-jvm,danielwegener/cucumber-jvm,hcawebdevelopment/cucumber-jvm,ArishArbab/cucumber-jvm,PeterDG/cucumberPro,HendrikSP/cucumber-jvm,goushijie/cucumber-jvm,paoloambrosio/cucumber-jvm,ppotanin/cucumber-jvm,PeterDG/cucumberPro,demos74dx/cucumber-jvm,dkowis/cucumber-jvm,demos74dx/cucumber-jvm,dkowis/cucumber-jvm,paoloambrosio/cucumber-jvm,cucumber/cucumber-jvm,hcawebdevelopment/cucumber-jvm,DPUkyle/cucumber-jvm,joansmith/cucumber-jvm,NickCharsley/cucumber-jvm,chiranjith/cucumber-jvm,goushijie/cucumber-jvm,DPUkyle/cucumber-jvm,ArishArbab/cucumber-jvm,flaviuratiu/cucumber-jvm,HendrikSP/cucumber-jvm,DPUkyle/cucumber-jvm,danielwegener/cucumber-jvm,andyb-ge/cucumber-jvm,flaviuratiu/cucumber-jvm,brasmusson/cucumber-jvm,Draeval/cucumber-jvm,sghill/cucumber-jvm,goushijie/cucumber-jvm,HendrikSP/cucumber-jvm,ArishArbab/cucumber-jvm,sghill/cucumber-jvm,paoloambrosio/cucumber-jvm,cucumber/cucumber-jvm,Draeval/cucumber-jvm,chrishowejones/cucumber-jvm,flaviuratiu/cucumber-jvm,joansmith/cucumber-jvm,HendrikSP/cucumber-jvm,PeterDG/cucumberPro,rlagunov-anaplan/cucumber-jvm,sventorben/cucumber-jvm,chrishowejones/cucumber-jvm,goushijie/cucumber-jvm,ArishArbab/cucumber-jvm,sghill/cucumber-jvm,chrishowejones/cucumber-jvm,andyb-ge/cucumber-jvm,andyb-ge/cucumber-jvm,ppotanin/cucumber-jvm,ppotanin/cucumber-jvm,ArishArbab/cucumber-jvm,cucumber/cucumber-jvm,ushkinaz/cucumber-jv
m,joansmith/cucumber-jvm,brasmusson/cucumber-jvm,Draeval/cucumber-jvm,joansmith/cucumber-jvm,chrishowejones/cucumber-jvm,danielwegener/cucumber-jvm,NickCharsley/cucumber-jvm,sventorben/cucumber-jvm,sghill/cucumber-jvm,demos74dx/cucumber-jvm,chiranjith/cucumber-jvm,PeterDG/cucumberPro,sventorben/cucumber-jvm,bartkeizer/cucumber-jvm,brasmusson/cucumber-jvm,chiranjith/cucumber-jvm,Draeval/cucumber-jvm,rlagunov-anaplan/cucumber-jvm,hcawebdevelopment/cucumber-jvm,danielwegener/cucumber-jvm,cucumber/cucumber-jvm,andyb-ge/cucumber-jvm,PeterDG/cucumberPro,PeterDG/cucumberPro,flaviuratiu/cucumber-jvm,demos74dx/cucumber-jvm,NickCharsley/cucumber-jvm,bartkeizer/cucumber-jvm,brasmusson/cucumber-jvm,chrishowejones/cucumber-jvm,cucumber/cucumber-jvm,chrishowejones/cucumber-jvm,ppotanin/cucumber-jvm,bartkeizer/cucumber-jvm,Draeval/cucumber-jvm,ushkinaz/cucumber-jvm,rlagunov-anaplan/cucumber-jvm,joansmith/cucumber-jvm,brasmusson/cucumber-jvm,ushkinaz/cucumber-jvm,goushijie/cucumber-jvm,hcawebdevelopment/cucumber-jvm,danielwegener/cucumber-jvm,ushkinaz/cucumber-jvm,demos74dx/cucumber-jvm,ushkinaz/cucumber-jvm,rlagunov-anaplan/cucumber-jvm,NickCharsley/cucumber-jvm,goushijie/cucumber-jvm,ArishArbab/cucumber-jvm,chiranjith/cucumber-jvm,flaviuratiu/cucumber-jvm,sghill/cucumber-jvm,Draeval/cucumber-jvm,andyb-ge/cucumber-jvm,flaviuratiu/cucumber-jvm,hcawebdevelopment/cucumber-jvm,chiranjith/cucumber-jvm,sghill/cucumber-jvm,rlagunov-anaplan/cucumber-jvm,DPUkyle/cucumber-jvm
package cucumber.runtime.xstream; import cucumber.deps.com.thoughtworks.xstream.converters.ConversionException; import java.text.FieldPosition; import java.text.Format; import java.text.ParsePosition; import java.util.ArrayList; import java.util.List; import java.util.Locale; import static java.util.Arrays.asList; class ConverterWithEnumFormat<T extends Enum> extends ConverterWithFormat<T> { private final List<Format> formats = new ArrayList<Format>(); private final Locale locale; private final Class<? extends Enum> typeClass; ConverterWithEnumFormat(Locale locale, Class<? extends Enum> enumClass) { super(new Class[]{enumClass}); this.locale = locale; this.typeClass = enumClass; formats.add(new OriginalFormat()); formats.add(new LowercaseFormat()); formats.add(new UppercaseFormat()); formats.add(new CapitalizeFormat()); } @Override public T transform(String string) { try { return super.transform(string); } catch (ConversionException e) { String allowed = asList(typeClass.getEnumConstants()).toString(); throw new ConversionException(String.format("Couldn't convert %s to %s. Legal values are %s", string, typeClass.getName(), allowed)); } } @Override public List<Format> getFormats() { return formats; } private class OriginalFormat extends Format { @Override public StringBuffer format(Object obj, StringBuffer toAppendTo, FieldPosition pos) { return toAppendTo.append(String.valueOf(obj)); } @Override public Object parseObject(String source, ParsePosition pos) { return source == null ? null : Enum.valueOf(typeClass, source); } } private class LowercaseFormat extends Format { @Override public StringBuffer format(Object obj, StringBuffer toAppendTo, FieldPosition pos) { return toAppendTo.append(String.valueOf(obj)); } @Override public Object parseObject(String source, ParsePosition pos) { return source == null ? 
null : Enum.valueOf(typeClass, source.toLowerCase(locale)); } } private class UppercaseFormat extends Format { @Override public StringBuffer format(Object obj, StringBuffer toAppendTo, FieldPosition pos) { return toAppendTo.append(String.valueOf(obj)); } @Override public Object parseObject(String source, ParsePosition pos) { return source == null ? null : Enum.valueOf(typeClass, source.toUpperCase(locale)); } } private class CapitalizeFormat extends Format { @Override public StringBuffer format(Object obj, StringBuffer toAppendTo, FieldPosition pos) { return toAppendTo.append(String.valueOf(obj)); } @Override public Object parseObject(String source, ParsePosition pos) { String firstLetter = source.substring(0, 1); String restOfTheString = source.substring(1, source.length()); return Enum.valueOf(typeClass, firstLetter.toUpperCase(locale) + restOfTheString); } } }
core/src/main/java/cucumber/runtime/xstream/ConverterWithEnumFormat.java
package cucumber.runtime.xstream; import cucumber.deps.com.thoughtworks.xstream.converters.ConversionException; import java.text.FieldPosition; import java.text.Format; import java.text.ParsePosition; import java.util.ArrayList; import java.util.List; import java.util.Locale; import static java.util.Arrays.asList; class ConverterWithEnumFormat<T extends Enum> extends ConverterWithFormat<T> { private final List<Format> formats = new ArrayList<Format>(); private final Locale locale; private final Class<? extends Enum> typeClass; ConverterWithEnumFormat(Locale locale, Class<? extends Enum> enumClass) { super(new Class[]{enumClass}); this.locale = locale; this.typeClass = enumClass; formats.add(new LowercaseFormat()); formats.add(new UppercaseFormat()); formats.add(new CapitalizeFormat()); } @Override public T transform(String string) { try { return super.transform(string); } catch (ConversionException e) { String allowed = asList(typeClass.getEnumConstants()).toString(); throw new ConversionException(String.format("Couldn't convert %s to %s. Legal values are %s", string, typeClass.getName(), allowed)); } } @Override public List<Format> getFormats() { return formats; } private class LowercaseFormat extends Format { @Override public StringBuffer format(Object obj, StringBuffer toAppendTo, FieldPosition pos) { return toAppendTo.append(String.valueOf(obj)); } @Override public Object parseObject(String source, ParsePosition pos) { return source == null ? null : Enum.valueOf(typeClass, source.toLowerCase(locale)); } } private class UppercaseFormat extends Format { @Override public StringBuffer format(Object obj, StringBuffer toAppendTo, FieldPosition pos) { return toAppendTo.append(String.valueOf(obj)); } @Override public Object parseObject(String source, ParsePosition pos) { return source == null ? 
null : Enum.valueOf(typeClass, source.toUpperCase(locale)); } } private class CapitalizeFormat extends Format { @Override public StringBuffer format(Object obj, StringBuffer toAppendTo, FieldPosition pos) { return toAppendTo.append(String.valueOf(obj)); } @Override public Object parseObject(String source, ParsePosition pos) { String firstLetter = source.substring(0, 1); String restOfTheString = source.substring(1, source.length()); return Enum.valueOf(typeClass, firstLetter.toUpperCase(locale) + restOfTheString); } } }
OriginalFormat added to the enum formats to account for the edge case of mixed case enum member starting with lower case letter
core/src/main/java/cucumber/runtime/xstream/ConverterWithEnumFormat.java
OriginalFormat added to the enum formats to account for the edge case of mixed case enum member starting with lower case letter
<ide><path>ore/src/main/java/cucumber/runtime/xstream/ConverterWithEnumFormat.java <ide> super(new Class[]{enumClass}); <ide> this.locale = locale; <ide> this.typeClass = enumClass; <add> formats.add(new OriginalFormat()); <ide> formats.add(new LowercaseFormat()); <ide> formats.add(new UppercaseFormat()); <ide> formats.add(new CapitalizeFormat()); <ide> @Override <ide> public List<Format> getFormats() { <ide> return formats; <add> } <add> <add> private class OriginalFormat extends Format { <add> <add> @Override <add> public StringBuffer format(Object obj, StringBuffer toAppendTo, FieldPosition pos) { <add> return toAppendTo.append(String.valueOf(obj)); <add> } <add> <add> @Override <add> public Object parseObject(String source, ParsePosition pos) { <add> return source == null ? null : Enum.valueOf(typeClass, source); <add> } <ide> } <ide> <ide> private class LowercaseFormat extends Format {
JavaScript
mit
990219f8e21e0b10c9278d4a4c1ee06426235d5b
0
dperini/nwmatcher,dperini/nwmatcher,dperini/nwmatcher
/*
 * NWMatcher unit-test suite (scotch runner).
 * Exercises NW.Dom.select / NW.Dom.match against the fixture DOM of the
 * test page. Invalid selectors are expected to raise errors whose message
 * matches /Error/ (VERBOSITY is enabled below).
 */
NW.Dom.configure({
  /* Disable complex selectors nested in :not() pseudo-classes
     to comply with the CSS3 Selectors specification:
     <http://www.w3.org/TR/2001/CR-css3-selectors-20011113/#negation> */
  "SIMPLENOT": true,
  "VERBOSITY": true,
  "USE_QSAPI": true
});

var getClass = Object.prototype.toString,

  /* Setting `RUN_BENCHMARKS` additionally benchmarks these selectors:
     E[foo^="bar"], E[foo$="bar"], E[foo*="bar"], E:first-child,
     E:last-child, E:only-child, E > F, E + F, E ~ F */
  RUN_BENCHMARKS = false,

  // The test runner instance all groups are registered on.
  runner = scotch("NWMatcher Unit Tests");

// Prototype-style `$`: resolves an id string (or passes an element
// through); with several arguments, returns an array of resolutions.
function getById(element){
  var index, length, elements;
  if((length = arguments.length) > 1){
    for(index = 0, elements = []; index < length; index++){
      elements[elements.length] = getById(arguments[index]);
    }
    return elements;
  }
  if(getClass.call(element) === "[object String]"){
    element = document.getElementById(element);
  }
  return element;
}

// The tests...
(function(runner){

  // NWMatcher entry points, aliased for brevity.
  var select = NW.Dom.select,
    match = NW.Dom.match;

  runner.addGroup("Basic Selectors").addTests(null, {

    // Universal selector
    "*": function(){
      var results = [], nodes = document.getElementsByTagName("*"),
        index = 0, length = nodes.length, node;
      // Collect all element nodes, excluding comments (IE exposes them as "!").
      for(; index < length; index++){
        if((node = nodes[index]).tagName !== "!"){
          results[results.length] = node;
        }
      }
      this.assertEquivalent(select("*"), results, "Comment nodes should be ignored.");
    },

    // Type selector
    "E": function(){
      var results = [], index = 0, nodes = document.getElementsByTagName("li");
      while((results[index] = nodes[index++])){}
      results.length--;
      this.assertEquivalent(select("li"), results);
      this.assertEqual(select("strong", getById("fixtures"))[0], getById("strong"));
      this.assertEquivalent(select("nonexistent"), []);
    },

    // ID selector
    "#id": function(){
      this.assertEqual(select("#fixtures")[0], getById("fixtures"));
      this.assertEquivalent(select("nonexistent"), []);
      this.assertEqual(select("#troubleForm")[0], getById("troubleForm"));
    },

    // Class selector
    ".class": function(){
      this.assertEquivalent(select(".first"), getById('p', 'link_1', 'item_1'));
      this.assertEquivalent(select(".second"), []);
    },

    "E#id": function(){
      this.assertEqual(select("strong#strong")[0], getById("strong"));
      this.assertEquivalent(select("p#strong"), []);
    },

    "E.class": function(){
      var secondLink = getById("link_2");
      this.assertEquivalent(select('a.internal'), getById('link_1', 'link_2'));
      this.assertEqual(select('a.internal.highlight')[0], secondLink);
      this.assertEqual(select('a.highlight.internal')[0], secondLink);
      this.assertEquivalent(select('a.highlight.internal.nonexistent'), []);
    },

    "#id.class": function(){
      var secondLink = getById('link_2');
      this.assertEqual(select('#link_2.internal')[0], secondLink);
      this.assertEqual(select('.internal#link_2')[0], secondLink);
      this.assertEqual(select('#link_2.internal.highlight')[0], secondLink);
      this.assertEquivalent(select('#link_2.internal.nonexistent'), []);
    },

    "E#id.class": function(){
      var secondLink = getById('link_2');
      this.assertEqual(select('a#link_2.internal')[0], secondLink);
      this.assertEqual(select('a.internal#link_2')[0], secondLink);
      this.assertEqual(select('li#item_1.first')[0], getById("item_1"));
      this.assertEquivalent(select('li#item_1.nonexistent'), []);
      this.assertEquivalent(select('li#item_1.first.nonexistent'), []);
    }
  });

  runner.addGroup("Attribute Selectors").addTests(null, {

    "[foo]": function(){
      this.assertEquivalent(select('[href]', document.body), select('a[href]', document.body));
      this.assertEquivalent(select('[class~=internal]'), select('a[class~="internal"]'));
      this.assertEquivalent(select('[id]'), select('*[id]'));
      this.assertEquivalent(select('[type=radio]'), getById('checked_radio', 'unchecked_radio'));
      this.assertEquivalent(select('[type=checkbox]'), select('*[type=checkbox]'));
      this.assertEquivalent(select('[title]'), getById('with_title', 'commaParent'));
      this.assertEquivalent(select('#troubleForm [type=radio]'), select('#troubleForm *[type=radio]'));
      this.assertEquivalent(select('#troubleForm [type]'), select('#troubleForm *[type]'));
    },

    "E[foo]": function(){
      this.assertEquivalent(select('h1[class]'), select('#fixtures h1'), "h1[class]");
      this.assertEquivalent(select('h1[CLASS]'), select('#fixtures h1'), "h1[CLASS]");
      this.assertEqual(select('li#item_3[class]')[0], getById('item_3'), "li#item_3[class]");
      // Brackets in attribute value
      this.assertEquivalent(select('#troubleForm2 input[name="brackets[5][]"]'), getById('chk_1', 'chk_2'));
      this.assertEqual(select('#troubleForm2 input[name="brackets[5][]"]:checked')[0], getById('chk_1'));
      // Space in attribute value
      this.assertEqual(select('cite[title="hello world!"]')[0], getById('with_title'));
      // Namespaced attributes
      this.assertEquivalent(select('[xml:lang]'), [document.documentElement, getById("item_3")]);
      this.assertEquivalent(select('*[xml:lang]'), [document.documentElement, getById("item_3")]);
    },

    'E[foo="bar"]': function(){
      this.assertEquivalent(select('a[href="#"]'), getById('link_1', 'link_2', 'link_3'));
      // Unquoted "#" is not a valid attribute value.
      this.assertThrowsException(/Error/, function(){
        select('a[href=#]');
      });
      this.assertEqual(select('#troubleForm2 input[name="brackets[5][]"][value="2"]')[0], getById('chk_2'));
    },

    'E[foo~="bar"]': function(){
      this.assertEquivalent(select('a[class~="internal"]'), getById('link_1', 'link_2'), "a[class~=\"internal\"]");
      this.assertEquivalent(select('a[class~=internal]'), getById('link_1', 'link_2'), "a[class~=internal]");
      this.assertEqual(select('a[class~=external][href="#"]')[0], getById('link_3'), 'a[class~=external][href="#"]');
    },

    'E[foo|="en"]': function(){
      this.assertEqual(select('*[xml:lang|="es"]')[0], getById('item_3'));
      this.assertEqual(select('*[xml:lang|="ES"]')[0], getById('item_3'));
    },

    'E[foo^="bar"]': function(){
      this.assertEquivalent(select('div[class^=bro]'), getById('father', 'uncle'), 'matching beginning of string');
      this.assertEquivalent(select('#level1 *[id^="level2_"]'), getById('level2_1', 'level2_2', 'level2_3'));
      this.assertEquivalent(select('#level1 *[id^=level2_]'), getById('level2_1', 'level2_2', 'level2_3'));
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *[id^=level2_]');
          }, 1000);
        }, 500);
      }
    },

    'E[foo$="bar"]': function(){
      this.assertEquivalent(select('div[class$=men]'), getById('father', 'uncle'), 'matching end of string');
      this.assertEquivalent(select('#level1 *[id$="_1"]'), getById('level2_1', 'level3_1'));
      this.assertEquivalent(select('#level1 *[id$=_1]'), getById('level2_1', 'level3_1'));
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *[id$=_1]');
          }, 1000);
        }, 500);
      }
    },

    'E[foo*="bar"]': function(){
      this.assertEquivalent(select('div[class*="ers m"]'), getById('father', 'uncle'), 'matching substring');
      this.assertEquivalent(select('#level1 *[id*="2"]'), getById('level2_1', 'level3_2', 'level2_2', 'level2_3'));
      // An unquoted value beginning with a digit is invalid here.
      this.assertThrowsException(/Error/, function(){
        select('#level1 *[id*=2]');
      });
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *[id*=2]');
          }, 1000);
        }, 500);
      }
    },

    // *** these should throw SYNTAX_ERR ***

    'E[id=-1]': function(){
      this.assertThrowsException(/Error/, function(){
        select('#level1 *[id=-1]');
      });
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *[id=9]');
          }, 1000);
        }, 500);
      }
    },

    'E[class=-45deg]': function(){
      this.assertThrowsException(/Error/, function(){
        select('#level1 *[class=-45deg]');
      });
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *[class=-45deg]');
          }, 1000);
        }, 500);
      }
    },

    'E[class=8mm]': function(){
      this.assertThrowsException(/Error/, function(){
        select('#level1 *[class=8mm]');
      });
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *[class=8mm]');
          }, 1000);
        }, 500);
      }
    }
  });

  runner.addGroup("Structural pseudo-classes").addTests(null, {

    "E:first-child": function(){
      this.assertEqual(select('#level1>*:first-child')[0], getById('level2_1'));
      this.assertEquivalent(select('#level1 *:first-child'), getById('level2_1', 'level3_1', 'level_only_child'));
      this.assertEquivalent(select('#level1>div:first-child'), []);
      this.assertEquivalent(select('#level1 span:first-child'), getById('level2_1', 'level3_1'));
      this.assertEquivalent(select('#level1:first-child'), []);
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *:first-child');
          }, 1000);
        }, 500);
      }
    },

    "E:last-child": function(){
      this.assertEqual(select('#level1>*:last-child')[0], getById('level2_3'));
      this.assertEquivalent(select('#level1 *:last-child'), getById('level3_2', 'level_only_child', 'level2_3'));
      this.assertEqual(select('#level1>div:last-child')[0], getById('level2_3'));
      this.assertEqual(select('#level1 div:last-child')[0], getById('level2_3'));
      this.assertEquivalent(select('#level1>span:last-child'), []);
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *:last-child');
          }, 1000);
        }, 500);
      }
    },

    "E:nth-child(n)": function(){
      this.assertEqual(select('#p *:nth-child(3)')[0], getById('link_2'));
      this.assertEqual(select('#p a:nth-child(3)')[0], getById('link_2'), 'nth-child');
      this.assertEquivalent(select('#list > li:nth-child(n+2)'), getById('item_2', 'item_3'));
      this.assertEquivalent(select('#list > li:nth-child(-n+2)'), getById('item_1', 'item_2'));
    },

    "E:nth-of-type(n)": function(){
      this.assertEqual(select('#p a:nth-of-type(2)')[0], getById('link_2'), 'nth-of-type');
      this.assertEqual(select('#p a:nth-of-type(1)')[0], getById('link_1'), 'nth-of-type');
    },

    "E:nth-last-of-type(n)": function(){
      this.assertEqual(select('#p a:nth-last-of-type(1)')[0], getById('link_2'), 'nth-last-of-type');
    },

    "E:first-of-type": function(){
      this.assertEqual(select('#p a:first-of-type')[0], getById('link_1'), 'first-of-type');
    },

    "E:last-of-type": function(){
      this.assertEqual(select('#p a:last-of-type')[0], getById('link_2'), 'last-of-type');
    },

    "E:only-child": function(){
      this.assertEqual(select('#level1 *:only-child')[0], getById('level_only_child'));
      // Shouldn't return anything
      this.assertEquivalent(select('#level1>*:only-child'), []);
      this.assertEquivalent(select('#level1:only-child'), []);
      this.assertEquivalent(select('#level2_2 :only-child:not(:last-child)'), []);
      this.assertEquivalent(select('#level2_2 :only-child:not(:first-child)'), []);
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *:only-child');
          }, 1000);
        }, 500);
      }
    },

    "E:empty": function(){
      getById('level3_1').innerHTML = "";
      if(document.createEvent){
        this.assertEquivalent(select('#level1 *:empty'), getById('level3_1', 'level3_2', 'level2_3'), '#level1 *:empty');
        this.assertEquivalent(select('#level_only_child:empty'), [], 'newlines count as content!');
      }else{
        // IE strips whitespace-only content, so the fallback expectation differs.
        this.assertEqual(select('#level3_1:empty')[0], getById('level3_1'), 'IE forced empty content!');
      }
      // Shouldn't return anything
      this.assertEquivalent(select('span:empty > *'), []);
    }
  });

  runner.addTests(null, {

    // Negation pseudo-class
    "E:not(s)": function(){
      this.assertEquivalent(select('a:not([href="#"])'), []);
      this.assertEquivalent(select('div.brothers:not(.brothers)'), []);
      this.assertEquivalent(select('a[class~=external]:not([href="#"])'), [], 'a[class~=external][href!="#"]');
      this.assertEqual(select('#p a:not(:first-of-type)')[0], getById('link_2'), 'first-of-type');
      this.assertEqual(select('#p a:not(:last-of-type)')[0], getById('link_1'), 'last-of-type');
      this.assertEqual(select('#p a:not(:nth-of-type(1))')[0], getById('link_2'), 'nth-of-type');
      this.assertEqual(select('#p a:not(:nth-last-of-type(1))')[0], getById('link_1'), 'nth-last-of-type');
      this.assertEqual(select('#p a:not([rel~=nofollow])')[0], getById('link_2'), 'attribute 1');
      this.assertEqual(select('#p a:not([rel^=external])')[0], getById('link_2'), 'attribute 2');
      this.assertEqual(select('#p a:not([rel$=nofollow])')[0], getById('link_2'), 'attribute 3');
      this.assertEqual(select('#p a:not([rel$="nofollow"]) > em')[0], getById('em'), 'attribute 4');
      this.assertEqual(select('#list li:not(#item_1):not(#item_3)')[0], getById('item_2'), 'adjacent :not clauses');
      this.assertEqual(select('#grandfather > div:not(#uncle) #son')[0], getById('son'));
      this.assertEqual(select('#p a:not([rel$="nofollow"]) em')[0], getById('em'), 'attribute 4 + all descendants');
      this.assertEqual(select('#p a:not([rel$="nofollow"])>em')[0], getById('em'), 'attribute 4 (without whitespace)');
    }
  });

  runner.addGroup("UI element states pseudo-classes").addTests(null, {

    "E:disabled": function(){
      this.assertEqual(select('#troubleForm > p > *:disabled')[0], getById('disabled_text_field'));
    },

    "E:checked": function(){
      this.assertEquivalent(select('#troubleForm *:checked'), getById('checked_box', 'checked_radio'));
    }
  });

  runner.addGroup("Combinators").addTests(null, {

    // Descendant combinator
    "E F": function(){
      this.assertEquivalent(select('#fixtures a *'), getById('em2', 'em', 'span'));
      this.assertEqual(select('div#fixtures p')[0], getById("p"));
    },

    // Adjacent sibling combinator
    "E + F": function(){
      this.assertEqual(select('div.brothers + div.brothers')[0], getById("uncle"));
      this.assertEqual(select('div.brothers + div')[0], getById('uncle'));
      this.assertEqual(select('#level2_1+span')[0], getById('level2_2'));
      this.assertEqual(select('#level2_1 + span')[0], getById('level2_2'));
      this.assertEqual(select('#level2_1 + *')[0], getById('level2_2'));
      this.assertEquivalent(select('#level2_2 + span'), []);
      this.assertEqual(select('#level3_1 + span')[0], getById('level3_2'));
      this.assertEqual(select('#level3_1 + *')[0], getById('level3_2'));
      this.assertEquivalent(select('#level3_2 + *'), []);
      this.assertEquivalent(select('#level3_1 + em'), []);
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level3_1 + span');
          }, 1000);
        }, 500);
      }
    },

    // Child combinator
    "E > F": function(){
      this.assertEquivalent(select('p.first > a'), getById('link_1', 'link_2'));
      this.assertEquivalent(select('div#grandfather > div'), getById('father', 'uncle'));
      this.assertEquivalent(select('#level1>span'), getById('level2_1', 'level2_2'));
      this.assertEquivalent(select('#level1 > span'), getById('level2_1', 'level2_2'));
      this.assertEquivalent(select('#level2_1 > *'), getById('level3_1', 'level3_2'));
      this.assertEquivalent(select('div > #nonexistent'), []);
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 > span');
          }, 1000);
        }, 500);
      }
    },

    // General sibling combinator
    "E ~ F": function(){
      this.assertEqual(select('h1 ~ ul')[0], getById('list'));
      this.assertEquivalent(select('#level2_2 ~ span'), []);
      this.assertEquivalent(select('#level3_2 ~ *'), []);
      this.assertEquivalent(select('#level3_1 ~ em'), []);
      this.assertEquivalent(select('div ~ #level3_2'), []);
      this.assertEquivalent(select('div ~ #level2_3'), []);
      this.assertEqual(select('#level2_1 ~ span')[0], getById('level2_2'));
      this.assertEquivalent(select('#level2_1 ~ *'), getById('level2_2', 'level2_3'));
      this.assertEqual(select('#level3_1 ~ #level3_2')[0], getById('level3_2'));
      this.assertEqual(select('span ~ #level3_2')[0], getById('level3_2'));
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level2_1 ~ span');
          }, 1000);
        }, 500);
      }
    }
  });

  runner.addTests(null, {

    "NW.Dom.match": function(){
      var element = getById('dupL1');
      // Assertions
      this.assert(match(element, 'span'));
      this.assert(match(element, "span#dupL1"));
      this.assert(match(element, "div > span"), "child combinator");
      this.assert(match(element, "#dupContainer span"), "descendant combinator");
      this.assert(match(element, "#dupL1"), "ID only");
      this.assert(match(element, "span.span_foo"), "class name 1");
      this.assert(match(element, "span.span_bar"), "class name 2");
      this.assert(match(element, "span:first-child"), "first-child pseudoclass");
      // Refutations
      this.refute(match(element, "span.span_wtf"), "bogus class name");
      this.refute(match(element, "#dupL2"), "different ID");
      this.refute(match(element, "div"), "different tag name");
      this.refute(match(element, "span span"), "different ancestry");
      this.refute(match(element, "span > span"), "different parent");
      this.refute(match(element, "span:nth-child(5)"), "different pseudoclass");
      // Misc.
      this.refute(match(getById('link_2'), 'a[rel^=external]'));
      this.assert(match(getById('link_1'), 'a[rel^=external]'));
      this.assert(match(getById('link_1'), 'a[rel^="external"]'));
      this.assert(match(getById('link_1'), "a[rel^='external']"));
    },

    "Equivalent Selectors": function(){
      this.assertEquivalent(select('div.brothers'), select('div[class~=brothers]'));
      this.assertEquivalent(select('div.brothers'), select('div[class~=brothers].brothers'));
      this.assertEquivalent(select('div:not(.brothers)'), select('div:not([class~=brothers])'));
      this.assertEquivalent(select('li ~ li'), select('li:not(:first-child)'));
      this.assertEquivalent(select('ul > li'), select('ul > li:nth-child(n)'));
      this.assertEquivalent(select('ul > li:nth-child(even)'), select('ul > li:nth-child(2n)'));
      this.assertEquivalent(select('ul > li:nth-child(odd)'), select('ul > li:nth-child(2n+1)'));
      this.assertEquivalent(select('ul > li:first-child'), select('ul > li:nth-child(1)'));
      this.assertEquivalent(select('ul > li:last-child'), select('ul > li:nth-last-child(1)'));
      /* Opera 10 does not accept values > 128 as a parameter to :nth-child
         See <http://operawiki.info/ArtificialLimits> */
      this.assertEquivalent(select('ul > li:nth-child(n-128)'), select('ul > li'));
      this.assertEquivalent(select('ul>li'), select('ul > li'));
      this.assertEquivalent(select('#p a:not([rel$="nofollow"])>em'), select('#p a:not([rel$="nofollow"]) > em'));
    },

    "Multiple Selectors": function(){
      // The next two assertions should return document-ordered lists of
      // matching elements --Diego Perini
      this.assertEquivalent(select('#list, .first,*[xml:lang="es-us"] , #troubleForm'),
        getById('p', 'link_1', 'list', 'item_1', 'item_3', 'troubleForm'));
      this.assertEquivalent(select('#list, .first, *[xml:lang="es-us"], #troubleForm'),
        getById('p', 'link_1', 'list', 'item_1', 'item_3', 'troubleForm'));
      this.assertEquivalent(select('form[title*="commas,"], input[value="#commaOne,#commaTwo"]'),
        getById('commaParent', 'commaChild'));
      this.assertEquivalent(select('form[title*="commas,"], input[value="#commaOne,#commaTwo"]'),
        getById('commaParent', 'commaChild'));
    }
  });
}(runner));
test/scotch/test.js
/*
 * NWMatcher unit-test suite (scotch runner).
 * Exercises NW.Dom.select / NW.Dom.match against the fixture DOM of the
 * test page. Invalid selectors are expected to raise errors whose message
 * matches /SYNTAX_ERR/ (VERBOSITY is enabled below).
 */
NW.Dom.configure({
  /* Disable complex selectors nested in :not() pseudo-classes
     to comply with the CSS3 Selectors specification:
     <http://www.w3.org/TR/2001/CR-css3-selectors-20011113/#negation> */
  "SIMPLENOT": true,
  "VERBOSITY": true,
  "USE_QSAPI": true
});

var getClass = Object.prototype.toString,

  /* Setting `RUN_BENCHMARKS` additionally benchmarks these selectors:
     E[foo^="bar"], E[foo$="bar"], E[foo*="bar"], E:first-child,
     E:last-child, E:only-child, E > F, E + F, E ~ F */
  RUN_BENCHMARKS = false,

  // The test runner instance all groups are registered on.
  runner = scotch("NWMatcher Unit Tests");

// Prototype-style `$`: resolves an id string (or passes an element
// through); with several arguments, returns an array of resolutions.
function getById(element){
  var index, length, elements;
  if((length = arguments.length) > 1){
    for(index = 0, elements = []; index < length; index++){
      elements[elements.length] = getById(arguments[index]);
    }
    return elements;
  }
  if(getClass.call(element) === "[object String]"){
    element = document.getElementById(element);
  }
  return element;
}

// The tests...
(function(runner){

  // NWMatcher entry points, aliased for brevity.
  var select = NW.Dom.select,
    match = NW.Dom.match;

  runner.addGroup("Basic Selectors").addTests(null, {

    // Universal selector
    "*": function(){
      var results = [], nodes = document.getElementsByTagName("*"),
        index = 0, length = nodes.length, node;
      // Collect all element nodes, excluding comments (IE exposes them as "!").
      for(; index < length; index++){
        if((node = nodes[index]).tagName !== "!"){
          results[results.length] = node;
        }
      }
      this.assertEquivalent(select("*"), results, "Comment nodes should be ignored.");
    },

    // Type selector
    "E": function(){
      var results = [], index = 0, nodes = document.getElementsByTagName("li");
      while((results[index] = nodes[index++])){}
      results.length--;
      this.assertEquivalent(select("li"), results);
      this.assertEqual(select("strong", getById("fixtures"))[0], getById("strong"));
      this.assertEquivalent(select("nonexistent"), []);
    },

    // ID selector
    "#id": function(){
      this.assertEqual(select("#fixtures")[0], getById("fixtures"));
      this.assertEquivalent(select("nonexistent"), []);
      this.assertEqual(select("#troubleForm")[0], getById("troubleForm"));
    },

    // Class selector
    ".class": function(){
      this.assertEquivalent(select(".first"), getById('p', 'link_1', 'item_1'));
      this.assertEquivalent(select(".second"), []);
    },

    "E#id": function(){
      this.assertEqual(select("strong#strong")[0], getById("strong"));
      this.assertEquivalent(select("p#strong"), []);
    },

    "E.class": function(){
      var secondLink = getById("link_2");
      this.assertEquivalent(select('a.internal'), getById('link_1', 'link_2'));
      this.assertEqual(select('a.internal.highlight')[0], secondLink);
      this.assertEqual(select('a.highlight.internal')[0], secondLink);
      this.assertEquivalent(select('a.highlight.internal.nonexistent'), []);
    },

    "#id.class": function(){
      var secondLink = getById('link_2');
      this.assertEqual(select('#link_2.internal')[0], secondLink);
      this.assertEqual(select('.internal#link_2')[0], secondLink);
      this.assertEqual(select('#link_2.internal.highlight')[0], secondLink);
      this.assertEquivalent(select('#link_2.internal.nonexistent'), []);
    },

    "E#id.class": function(){
      var secondLink = getById('link_2');
      this.assertEqual(select('a#link_2.internal')[0], secondLink);
      this.assertEqual(select('a.internal#link_2')[0], secondLink);
      this.assertEqual(select('li#item_1.first')[0], getById("item_1"));
      this.assertEquivalent(select('li#item_1.nonexistent'), []);
      this.assertEquivalent(select('li#item_1.first.nonexistent'), []);
    }
  });

  runner.addGroup("Attribute Selectors").addTests(null, {

    "[foo]": function(){
      this.assertEquivalent(select('[href]', document.body), select('a[href]', document.body));
      this.assertEquivalent(select('[class~=internal]'), select('a[class~="internal"]'));
      this.assertEquivalent(select('[id]'), select('*[id]'));
      this.assertEquivalent(select('[type=radio]'), getById('checked_radio', 'unchecked_radio'));
      this.assertEquivalent(select('[type=checkbox]'), select('*[type=checkbox]'));
      this.assertEquivalent(select('[title]'), getById('with_title', 'commaParent'));
      this.assertEquivalent(select('#troubleForm [type=radio]'), select('#troubleForm *[type=radio]'));
      this.assertEquivalent(select('#troubleForm [type]'), select('#troubleForm *[type]'));
    },

    "E[foo]": function(){
      this.assertEquivalent(select('h1[class]'), select('#fixtures h1'), "h1[class]");
      this.assertEquivalent(select('h1[CLASS]'), select('#fixtures h1'), "h1[CLASS]");
      this.assertEqual(select('li#item_3[class]')[0], getById('item_3'), "li#item_3[class]");
      // Brackets in attribute value
      this.assertEquivalent(select('#troubleForm2 input[name="brackets[5][]"]'), getById('chk_1', 'chk_2'));
      this.assertEqual(select('#troubleForm2 input[name="brackets[5][]"]:checked')[0], getById('chk_1'));
      // Space in attribute value
      this.assertEqual(select('cite[title="hello world!"]')[0], getById('with_title'));
      // Namespaced attributes
      this.assertEquivalent(select('[xml:lang]'), [document.documentElement, getById("item_3")]);
      this.assertEquivalent(select('*[xml:lang]'), [document.documentElement, getById("item_3")]);
    },

    'E[foo="bar"]': function(){
      this.assertEquivalent(select('a[href="#"]'), getById('link_1', 'link_2', 'link_3'));
      // Unquoted "#" is not a valid attribute value.
      this.assertThrowsException(/SYNTAX_ERR/, function(){
        select('a[href=#]');
      });
      this.assertEqual(select('#troubleForm2 input[name="brackets[5][]"][value="2"]')[0], getById('chk_2'));
    },

    'E[foo~="bar"]': function(){
      this.assertEquivalent(select('a[class~="internal"]'), getById('link_1', 'link_2'), "a[class~=\"internal\"]");
      this.assertEquivalent(select('a[class~=internal]'), getById('link_1', 'link_2'), "a[class~=internal]");
      this.assertEqual(select('a[class~=external][href="#"]')[0], getById('link_3'), 'a[class~=external][href="#"]');
    },

    'E[foo|="en"]': function(){
      this.assertEqual(select('*[xml:lang|="es"]')[0], getById('item_3'));
      this.assertEqual(select('*[xml:lang|="ES"]')[0], getById('item_3'));
    },

    'E[foo^="bar"]': function(){
      this.assertEquivalent(select('div[class^=bro]'), getById('father', 'uncle'), 'matching beginning of string');
      this.assertEquivalent(select('#level1 *[id^="level2_"]'), getById('level2_1', 'level2_2', 'level2_3'));
      this.assertEquivalent(select('#level1 *[id^=level2_]'), getById('level2_1', 'level2_2', 'level2_3'));
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *[id^=level2_]');
          }, 1000);
        }, 500);
      }
    },

    'E[foo$="bar"]': function(){
      this.assertEquivalent(select('div[class$=men]'), getById('father', 'uncle'), 'matching end of string');
      this.assertEquivalent(select('#level1 *[id$="_1"]'), getById('level2_1', 'level3_1'));
      this.assertEquivalent(select('#level1 *[id$=_1]'), getById('level2_1', 'level3_1'));
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *[id$=_1]');
          }, 1000);
        }, 500);
      }
    },

    'E[foo*="bar"]': function(){
      this.assertEquivalent(select('div[class*="ers m"]'), getById('father', 'uncle'), 'matching substring');
      this.assertEquivalent(select('#level1 *[id*="2"]'), getById('level2_1', 'level3_2', 'level2_2', 'level2_3'));
      // An unquoted value beginning with a digit is invalid here.
      this.assertThrowsException(/SYNTAX_ERR/, function(){
        select('#level1 *[id*=2]');
      });
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *[id*=2]');
          }, 1000);
        }, 500);
      }
    },

    // *** these should throw SYNTAX_ERR ***

    'E[id=-1]': function(){
      this.assertThrowsException(/SYNTAX_ERR/, function(){
        select('#level1 *[id=-1]');
      });
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *[id=9]');
          }, 1000);
        }, 500);
      }
    },

    'E[class=-45deg]': function(){
      this.assertThrowsException(/SYNTAX_ERR/, function(){
        select('#level1 *[class=-45deg]');
      });
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *[class=-45deg]');
          }, 1000);
        }, 500);
      }
    },

    'E[class=8mm]': function(){
      this.assertThrowsException(/SYNTAX_ERR/, function(){
        select('#level1 *[class=8mm]');
      });
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *[class=8mm]');
          }, 1000);
        }, 500);
      }
    }
  });

  runner.addGroup("Structural pseudo-classes").addTests(null, {

    "E:first-child": function(){
      this.assertEqual(select('#level1>*:first-child')[0], getById('level2_1'));
      this.assertEquivalent(select('#level1 *:first-child'), getById('level2_1', 'level3_1', 'level_only_child'));
      this.assertEquivalent(select('#level1>div:first-child'), []);
      this.assertEquivalent(select('#level1 span:first-child'), getById('level2_1', 'level3_1'));
      this.assertEquivalent(select('#level1:first-child'), []);
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *:first-child');
          }, 1000);
        }, 500);
      }
    },

    "E:last-child": function(){
      this.assertEqual(select('#level1>*:last-child')[0], getById('level2_3'));
      this.assertEquivalent(select('#level1 *:last-child'), getById('level3_2', 'level_only_child', 'level2_3'));
      this.assertEqual(select('#level1>div:last-child')[0], getById('level2_3'));
      this.assertEqual(select('#level1 div:last-child')[0], getById('level2_3'));
      this.assertEquivalent(select('#level1>span:last-child'), []);
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *:last-child');
          }, 1000);
        }, 500);
      }
    },

    "E:nth-child(n)": function(){
      this.assertEqual(select('#p *:nth-child(3)')[0], getById('link_2'));
      this.assertEqual(select('#p a:nth-child(3)')[0], getById('link_2'), 'nth-child');
      this.assertEquivalent(select('#list > li:nth-child(n+2)'), getById('item_2', 'item_3'));
      this.assertEquivalent(select('#list > li:nth-child(-n+2)'), getById('item_1', 'item_2'));
    },

    "E:nth-of-type(n)": function(){
      this.assertEqual(select('#p a:nth-of-type(2)')[0], getById('link_2'), 'nth-of-type');
      this.assertEqual(select('#p a:nth-of-type(1)')[0], getById('link_1'), 'nth-of-type');
    },

    "E:nth-last-of-type(n)": function(){
      this.assertEqual(select('#p a:nth-last-of-type(1)')[0], getById('link_2'), 'nth-last-of-type');
    },

    "E:first-of-type": function(){
      this.assertEqual(select('#p a:first-of-type')[0], getById('link_1'), 'first-of-type');
    },

    "E:last-of-type": function(){
      this.assertEqual(select('#p a:last-of-type')[0], getById('link_2'), 'last-of-type');
    },

    "E:only-child": function(){
      this.assertEqual(select('#level1 *:only-child')[0], getById('level_only_child'));
      // Shouldn't return anything
      this.assertEquivalent(select('#level1>*:only-child'), []);
      this.assertEquivalent(select('#level1:only-child'), []);
      this.assertEquivalent(select('#level2_2 :only-child:not(:last-child)'), []);
      this.assertEquivalent(select('#level2_2 :only-child:not(:first-child)'), []);
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 *:only-child');
          }, 1000);
        }, 500);
      }
    },

    "E:empty": function(){
      getById('level3_1').innerHTML = "";
      if(document.createEvent){
        this.assertEquivalent(select('#level1 *:empty'), getById('level3_1', 'level3_2', 'level2_3'), '#level1 *:empty');
        this.assertEquivalent(select('#level_only_child:empty'), [], 'newlines count as content!');
      }else{
        // IE strips whitespace-only content, so the fallback expectation
        // differs and the remainder of the test is skipped.
        this.assertEqual(select('#level3_1:empty')[0], getById('level3_1'), 'IE forced empty content!');
        this.skip("IE forced empty content!");
      }
      // Shouldn't return anything
      this.assertEquivalent(select('span:empty > *'), []);
    }
  });

  runner.addTests(null, {

    // Negation pseudo-class
    "E:not(s)": function(){
      this.assertEquivalent(select('a:not([href="#"])'), []);
      this.assertEquivalent(select('div.brothers:not(.brothers)'), []);
      this.assertEquivalent(select('a[class~=external]:not([href="#"])'), [], 'a[class~=external][href!="#"]');
      this.assertEqual(select('#p a:not(:first-of-type)')[0], getById('link_2'), 'first-of-type');
      this.assertEqual(select('#p a:not(:last-of-type)')[0], getById('link_1'), 'last-of-type');
      this.assertEqual(select('#p a:not(:nth-of-type(1))')[0], getById('link_2'), 'nth-of-type');
      this.assertEqual(select('#p a:not(:nth-last-of-type(1))')[0], getById('link_1'), 'nth-last-of-type');
      this.assertEqual(select('#p a:not([rel~=nofollow])')[0], getById('link_2'), 'attribute 1');
      this.assertEqual(select('#p a:not([rel^=external])')[0], getById('link_2'), 'attribute 2');
      this.assertEqual(select('#p a:not([rel$=nofollow])')[0], getById('link_2'), 'attribute 3');
      this.assertEqual(select('#p a:not([rel$="nofollow"]) > em')[0], getById('em'), 'attribute 4');
      this.assertEqual(select('#list li:not(#item_1):not(#item_3)')[0], getById('item_2'), 'adjacent :not clauses');
      this.assertEqual(select('#grandfather > div:not(#uncle) #son')[0], getById('son'));
      this.assertEqual(select('#p a:not([rel$="nofollow"]) em')[0], getById('em'), 'attribute 4 + all descendants');
      this.assertEqual(select('#p a:not([rel$="nofollow"])>em')[0], getById('em'), 'attribute 4 (without whitespace)');
    }
  });

  runner.addGroup("UI element states pseudo-classes").addTests(null, {

    "E:disabled": function(){
      this.assertEqual(select('#troubleForm > p > *:disabled')[0], getById('disabled_text_field'));
    },

    "E:checked": function(){
      this.assertEquivalent(select('#troubleForm *:checked'), getById('checked_box', 'checked_radio'));
    }
  });

  runner.addGroup("Combinators").addTests(null, {

    // Descendant combinator
    "E F": function(){
      this.assertEquivalent(select('#fixtures a *'), getById('em2', 'em', 'span'));
      this.assertEqual(select('div#fixtures p')[0], getById("p"));
    },

    // Adjacent sibling combinator
    "E + F": function(){
      this.assertEqual(select('div.brothers + div.brothers')[0], getById("uncle"));
      this.assertEqual(select('div.brothers + div')[0], getById('uncle'));
      this.assertEqual(select('#level2_1+span')[0], getById('level2_2'));
      this.assertEqual(select('#level2_1 + span')[0], getById('level2_2'));
      this.assertEqual(select('#level2_1 + *')[0], getById('level2_2'));
      this.assertEquivalent(select('#level2_2 + span'), []);
      this.assertEqual(select('#level3_1 + span')[0], getById('level3_2'));
      this.assertEqual(select('#level3_1 + *')[0], getById('level3_2'));
      this.assertEquivalent(select('#level3_2 + *'), []);
      this.assertEquivalent(select('#level3_1 + em'), []);
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level3_1 + span');
          }, 1000);
        }, 500);
      }
    },

    // Child combinator
    "E > F": function(){
      this.assertEquivalent(select('p.first > a'), getById('link_1', 'link_2'));
      this.assertEquivalent(select('div#grandfather > div'), getById('father', 'uncle'));
      this.assertEquivalent(select('#level1>span'), getById('level2_1', 'level2_2'));
      this.assertEquivalent(select('#level1 > span'), getById('level2_1', 'level2_2'));
      this.assertEquivalent(select('#level2_1 > *'), getById('level3_1', 'level3_2'));
      this.assertEquivalent(select('div > #nonexistent'), []);
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level1 > span');
          }, 1000);
        }, 500);
      }
    },

    // General sibling combinator
    "E ~ F": function(){
      this.assertEqual(select('h1 ~ ul')[0], getById('list'));
      this.assertEquivalent(select('#level2_2 ~ span'), []);
      this.assertEquivalent(select('#level3_2 ~ *'), []);
      this.assertEquivalent(select('#level3_1 ~ em'), []);
      this.assertEquivalent(select('div ~ #level3_2'), []);
      this.assertEquivalent(select('div ~ #level2_3'), []);
      this.assertEqual(select('#level2_1 ~ span')[0], getById('level2_2'));
      this.assertEquivalent(select('#level2_1 ~ *'), getById('level2_2', 'level2_3'));
      this.assertEqual(select('#level3_1 ~ #level3_2')[0], getById('level3_2'));
      this.assertEqual(select('span ~ #level3_2')[0], getById('level3_2'));
      if(RUN_BENCHMARKS){
        this.wait(function(){
          this.benchmark(function(){
            select('#level2_1 ~ span');
          }, 1000);
        }, 500);
      }
    }
  });

  runner.addTests(null, {

    "NW.Dom.match": function(){
      var element = getById('dupL1');
      // Assertions
      this.assert(match(element, 'span'));
      this.assert(match(element, "span#dupL1"));
      this.assert(match(element, "div > span"), "child combinator");
      this.assert(match(element, "#dupContainer span"), "descendant combinator");
      this.assert(match(element, "#dupL1"), "ID only");
      this.assert(match(element, "span.span_foo"), "class name 1");
      this.assert(match(element, "span.span_bar"), "class name 2");
      this.assert(match(element, "span:first-child"), "first-child pseudoclass");
      // Refutations
      this.refute(match(element, "span.span_wtf"), "bogus class name");
      this.refute(match(element, "#dupL2"), "different ID");
      this.refute(match(element, "div"), "different tag name");
      this.refute(match(element, "span span"), "different ancestry");
      this.refute(match(element, "span > span"), "different parent");
      this.refute(match(element, "span:nth-child(5)"), "different pseudoclass");
      // Misc.
      this.refute(match(getById('link_2'), 'a[rel^=external]'));
      this.assert(match(getById('link_1'), 'a[rel^=external]'));
      this.assert(match(getById('link_1'), 'a[rel^="external"]'));
      this.assert(match(getById('link_1'), "a[rel^='external']"));
    },

    "Equivalent Selectors": function(){
      this.assertEquivalent(select('div.brothers'), select('div[class~=brothers]'));
      this.assertEquivalent(select('div.brothers'), select('div[class~=brothers].brothers'));
      this.assertEquivalent(select('div:not(.brothers)'), select('div:not([class~=brothers])'));
      this.assertEquivalent(select('li ~ li'), select('li:not(:first-child)'));
      this.assertEquivalent(select('ul > li'), select('ul > li:nth-child(n)'));
      this.assertEquivalent(select('ul > li:nth-child(even)'), select('ul > li:nth-child(2n)'));
      this.assertEquivalent(select('ul > li:nth-child(odd)'), select('ul > li:nth-child(2n+1)'));
      this.assertEquivalent(select('ul > li:first-child'), select('ul > li:nth-child(1)'));
      this.assertEquivalent(select('ul > li:last-child'), select('ul > li:nth-last-child(1)'));
      /* Opera 10 does not accept values > 128 as a parameter to :nth-child
         See <http://operawiki.info/ArtificialLimits> */
      this.assertEquivalent(select('ul > li:nth-child(n-128)'), select('ul > li'));
      this.assertEquivalent(select('ul>li'), select('ul > li'));
      this.assertEquivalent(select('#p a:not([rel$="nofollow"])>em'), select('#p a:not([rel$="nofollow"]) > em'));
    },

    "Multiple Selectors": function(){
      // The next two assertions should return document-ordered lists of
      // matching elements --Diego Perini
      this.assertEquivalent(select('#list, .first,*[xml:lang="es-us"] , #troubleForm'),
        getById('p', 'link_1', 'list', 'item_1', 'item_3', 'troubleForm'));
      this.assertEquivalent(select('#list, .first, *[xml:lang="es-us"], #troubleForm'),
        getById('p', 'link_1', 'list', 'item_1', 'item_3', 'troubleForm'));
      this.assertEquivalent(select('form[title*="commas,"], input[value="#commaOne,#commaTwo"]'),
        getById('commaParent', 'commaChild'));
      this.assertEquivalent(select('form[title*="commas,"], input[value="#commaOne,#commaTwo"]'),
        getById('commaParent', 'commaChild'));
    }
  });
}(runner));
updated Scotch tests to detect the type of 'Error()' thrown
test/scotch/test.js
updated Scotch tests to detect the type of 'Error()' thrown
<ide><path>est/scotch/test.js <ide> }, <ide> 'E[foo="bar"]': function(){ <ide> this.assertEquivalent(select('a[href="#"]'), getById('link_1', 'link_2', 'link_3')); <del> this.assertThrowsException(/SYNTAX_ERR/, function(){ <add> this.assertThrowsException(/Error/, function(){ <ide> select('a[href=#]'); <ide> }); <ide> this.assertEqual(select('#troubleForm2 input[name="brackets[5][]"][value="2"]')[0], getById('chk_2')); <ide> 'E[foo*="bar"]': function(){ <ide> this.assertEquivalent(select('div[class*="ers m"]'), getById('father', 'uncle'), 'matching substring'); <ide> this.assertEquivalent(select('#level1 *[id*="2"]'), getById('level2_1', 'level3_2', 'level2_2', 'level2_3')); <del> this.assertThrowsException(/SYNTAX_ERR/, function(){ <add> this.assertThrowsException(/Error/, function(){ <ide> select('#level1 *[id*=2]'); <ide> }); <ide> if(RUN_BENCHMARKS){ <ide> // *** these should throw SYNTAX_ERR *** <ide> <ide> 'E[id=-1]': function(){ <del> this.assertThrowsException(/SYNTAX_ERR/, function(){ <add> this.assertThrowsException(/Error/, function(){ <ide> select('#level1 *[id=-1]'); <ide> }); <ide> if(RUN_BENCHMARKS){ <ide> } <ide> }, <ide> 'E[class=-45deg]': function(){ <del> this.assertThrowsException(/SYNTAX_ERR/, function(){ <add> this.assertThrowsException(/Error/, function(){ <ide> select('#level1 *[class=-45deg]'); <ide> }); <ide> if(RUN_BENCHMARKS){ <ide> } <ide> }, <ide> 'E[class=8mm]': function(){ <del> this.assertThrowsException(/SYNTAX_ERR/, function(){ <add> this.assertThrowsException(/Error/, function(){ <ide> select('#level1 *[class=8mm]'); <ide> }); <ide> if(RUN_BENCHMARKS){ <ide> this.assertEquivalent(select('#level_only_child:empty'), [], 'newlines count as content!'); <ide> }else{ <ide> this.assertEqual(select('#level3_1:empty')[0], getById('level3_1'), 'IE forced empty content!'); <del> this.skip("IE forced empty content!"); <add> //this.skip("IE forced empty content!"); <ide> } <ide> //Shouldn't return anything <ide> 
this.assertEquivalent(select('span:empty > *'), []);
Java
apache-2.0
57e375e0ecf213c6a65d77812c24042eb9c52ad2
0
fengbaicanhe/intellij-community,holmes/intellij-community,asedunov/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,fitermay/intellij-community,slisson/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,kdwink/intellij-community,xfournet/intellij-community,jexp/idea2,fitermay/intellij-community,salguarnieri/intellij-community,joewalnes/idea-community,ftomassetti/intellij-community,caot/intellij-community,retomerz/intellij-community,jexp/idea2,akosyakov/intellij-community,xfournet/intellij-community,kdwink/intellij-community,samthor/intellij-community,samthor/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,samthor/intellij-community,fnouama/intellij-community,asedunov/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,slisson/intellij-community,supersven/intellij-community,fnouama/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,jagguli/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,Lekanich/intellij-community,michaelgallacher/intelli
j-community,ahb0327/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,ryano144/intellij-community,semonte/intellij-community,ryano144/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,ThiagoGarciaAlves/intellij-community,caot/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,petteyg/intellij-community,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,ernestp/consulo,Lekanich/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,samthor/intellij-community,apixandru/intellij-community,semonte/intellij-community,tmpgit/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,robovm/robovm-studio,xfournet/intellij-community,jagguli/intellij-community,michaelgallacher/intellij-community,Lekanich/intellij-community,wreckJ/intellij-community,da1z/intellij-community,slisson/intellij-community,jagguli/intellij-community,dslomov/intellij-community,fnouama/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,amith01994/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,nicolargo/intellij-communit
y,jagguli/intellij-community,kool79/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,kdwink/intellij-community,joewalnes/idea-community,vvv1559/intellij-community,suncycheng/intellij-community,da1z/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,SerCeMan/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,orekyuu/intellij-community,da1z/intellij-community,jexp/idea2,kool79/intellij-community,vladmm/intellij-community,signed/intellij-community,dslomov/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,ahb0327/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,holmes/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,joewalnes/idea-community,vvv1559/intellij-community,ernestp/consulo,FHannes/intellij-community,hurricup/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,samthor/intellij-community,semonte/intellij-community,tmpgit/intellij-community,ibinti/intellij-community,adedayo/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,akosyakov/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,robovm/robovm-studio,TangHao1987/intellij-community,adedayo/intellij-community,FHannes/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,signed/intellij-community,FHannes/intellij-community,semonte/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,apixandru/intellij-community,FHann
es/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,holmes/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,fitermay/intellij-community,blademainer/intellij-community,petteyg/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,consulo/consulo,ahb0327/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,consulo/consulo,Distrotech/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,semonte/intellij-community,allotria/intellij-community,izonder/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,vladmm/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,fitermay/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,ol-loginov/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,consulo/consulo,robovm/robovm-studio,SerCeMan/intellij-community,kdwink/intellij-community,xfournet/intellij-community,caot/intellij-community,Lekanich/intellij-community,kool79/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,ibinti/intellij-community,asedunov/intellij-community,signed/intellij-community,fnouama/intellij-community,joewalnes/idea-community,signed/intellij-community,ibinti/intellij-community,SerCeMan/intellij-community,da1z/intellij-community,allotria/intellij-community,signed/i
ntellij-community,da1z/intellij-community,jexp/idea2,ftomassetti/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,izonder/intellij-community,robovm/robovm-studio,holmes/intellij-community,izonder/intellij-community,salguarnieri/intellij-community,kool79/intellij-community,semonte/intellij-community,ryano144/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,fnouama/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,clumsy/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,da1z/intellij-community,dslomov/intellij-community,caot/intellij-community,kool79/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,wreckJ/intellij-community,supersven/intellij-community,petteyg/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,consulo/consulo,supersven/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,consulo/consulo,pwoodworth/intellij-community,asedunov/intellij-community,ernestp/consulo,orekyuu/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,muntasirsyed/intellij-community,da
1z/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,vladmm/intellij-community,ol-loginov/intellij-community,jexp/idea2,michaelgallacher/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,blademainer/intellij-community,ibinti/intellij-community,apixandru/intellij-community,supersven/intellij-community,izonder/intellij-community,suncycheng/intellij-community,da1z/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,izonder/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,signed/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,semonte/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,amith01994/intellij-community,ryano144/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,caot/intellij-community,ol-loginov/intellij-community,Lekanich/intellij-community,retomerz/intellij-community,consulo/consulo,asedunov/intellij-community,pwoodworth/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,fitermay/intellij-community,adedayo/intellij-community,akosyakov/intellij-community,holmes/intellij-community,diorcety/intellij-community,amith01994/intellij-community,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,lucafava
tella/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,allotria/intellij-community,SerCeMan/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,Distrotech/intellij-community,jagguli/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,diorcety/intellij-community,semonte/intellij-community,allotria/intellij-community,jexp/idea2,alphafoobar/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,xfournet/intellij-community,SerCeMan/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,samthor/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,supersven/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,caot/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,diorcety/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,kool79/intellij-community,fnouama/intellij-community,vladmm/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,vvv1559/intellij-community,robovm/robovm-studio,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,petteyg/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,hurricup/intellij-community,xfournet
/intellij-community,joewalnes/idea-community,ryano144/intellij-community,ahb0327/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,izonder/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,apixandru/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,ernestp/consulo,akosyakov/intellij-community,clumsy/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,slisson/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,slisson/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,joewalnes/idea-community,samthor/intellij-community,semonte/intellij-community,gnuhub/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,allotria/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,izonder/intellij-community,kdwink/intellij-community,da1z/intellij-community,hurricup/intellij-community,caot/intellij-community,da1z/intellij-community,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,caot/intellij-community,clumsy/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,ryano144/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,FHannes/intellij-community,xfournet/intellij-community,jexp/idea2,suncycheng/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,r
obovm/robovm-studio,ftomassetti/intellij-community,supersven/intellij-community,youdonghai/intellij-community,ahb0327/intellij-community,slisson/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,joewalnes/idea-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,supersven/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,adedayo/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,allotria/intellij-community,ernestp/consulo,akosyakov/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,da1z/intellij-community,ahb0327/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,apixandru/intellij-community,izonder/intellij-community,suncycheng/intellij-community,kool79/intellij-community,caot/intellij-community,jagguli/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,youdonghai/intellij-community,samthor/intellij-community,semonte/intellij-community,Lekanich/intellij-community,hurricup/intellij-community,holmes/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,dslomov/intellij-community,Lekanich/intellij-community,clumsy/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,fnouama/inte
llij-community,allotria/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,diorcety/intellij-community,adedayo/intellij-community,xfournet/intellij-community,apixandru/intellij-community,izonder/intellij-community,ol-loginov/intellij-community,ibinti/intellij-community,apixandru/intellij-community,supersven/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,FHannes/intellij-community,asedunov/intellij-community,jexp/idea2,gnuhub/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,blademainer/intellij-community,youdonghai/intellij-community,allotria/intellij-community,kool79/intellij-community,amith01994/intellij-community,fnouama/intellij-community,joewalnes/idea-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,signed/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,samthor/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,supersven/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,slisson/intellij-community,vladmm/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,amith01994/intellij-community,adedayo/intellij-community,TangHao1987/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community
,kool79/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,joewalnes/idea-community,diorcety/intellij-community,kdwink/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,slisson/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,ivan-fedorov/intellij-community,FHannes/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,ftomassetti/intellij-community,ernestp/consulo,wreckJ/intellij-community,ivan-fedorov/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,signed/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,signed/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,clumsy/intellij-community,signed/intellij-community,dslomov/intellij-community,caot/intellij-community,semonte/intellij-community,holmes/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,retomerz/intellij-community,dslomov/intellij-community,dslomov/intellij-community
package com.intellij.psi.impl.source.resolve.reference.impl.providers; import com.intellij.codeInsight.daemon.EmptyResolveMessageProvider; import com.intellij.codeInsight.daemon.JavaErrorMessages; import com.intellij.codeInsight.daemon.QuickFixProvider; import com.intellij.codeInsight.daemon.impl.HighlightInfo; import com.intellij.codeInsight.lookup.LookupValueFactory; import com.intellij.codeInspection.LocalQuickFix; import com.intellij.codeInspection.LocalQuickFixProvider; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Iconable; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.impl.PsiManagerImpl; import com.intellij.psi.impl.source.resolve.ResolveCache; import com.intellij.psi.impl.source.resolve.reference.ProcessorRegistry; import com.intellij.psi.impl.source.resolve.reference.impl.GenericReference; import com.intellij.psi.infos.CandidateInfo; import com.intellij.psi.scope.BaseScopeProcessor; import com.intellij.psi.scope.PsiConflictResolver; import com.intellij.psi.scope.PsiScopeProcessor; import com.intellij.psi.scope.conflictResolvers.DuplicateConflictResolver; import com.intellij.psi.search.PsiElementProcessor; import com.intellij.util.ArrayUtil; import com.intellij.util.IncorrectOperationException; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; /** * @author cdr */ public class FileReference implements PsiPolyVariantReference, QuickFixProvider<FileReference>, LocalQuickFixProvider, EmptyResolveMessageProvider { public static final FileReference[] EMPTY = new FileReference[0]; private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.resolve.reference.impl.providers.FileReference"); 
private final int myIndex; private TextRange myRange; private final String myText; @NotNull private final FileReferenceSet myFileReferenceSet; private static final List<PsiConflictResolver> RESOLVERS = Arrays.<PsiConflictResolver>asList(new DuplicateConflictResolver()); public FileReference(final @NotNull FileReferenceSet fileReferenceSet, TextRange range, int index, String text) { myFileReferenceSet = fileReferenceSet; myIndex = index; myRange = range; myText = text; } @NotNull private Collection<PsiFileSystemItem> getContexts() { final FileReference contextRef = getContextReference(); if (contextRef == null) { return myFileReferenceSet.getDefaultContexts(); } ResolveResult[] resolveResults = contextRef.multiResolve(false); ArrayList<PsiFileSystemItem> result = new ArrayList<PsiFileSystemItem>(); for (ResolveResult resolveResult : resolveResults) { result.add((PsiFileSystemItem)resolveResult.getElement()); } return result; } @NotNull public ResolveResult[] multiResolve(final boolean incompleteCode) { final PsiManager manager = getElement().getManager(); if (manager instanceof PsiManagerImpl) { return ((PsiManagerImpl)manager).getResolveCache().resolveWithCaching(this, MyResolver.INSTANCE, false, false); } return innerResolve(); } protected ResolveResult[] innerResolve() { final String referenceText = getText(); final Collection<PsiFileSystemItem> contexts = getContexts(); final Collection<ResolveResult> result = new ArrayList<ResolveResult>(contexts.size()); for (final PsiFileSystemItem context : contexts) { innerResolveInContext(referenceText, context, result); } final int resultCount = result.size(); return resultCount > 0 ? 
result.toArray(new ResolveResult[resultCount]) : ResolveResult.EMPTY_ARRAY; } private void innerResolveInContext(@NotNull final String text, @NotNull final PsiFileSystemItem context, final Collection<ResolveResult> result) { if (text.length() == 0 && !myFileReferenceSet.isEndingSlashNotAllowed() && isLast() || ".".equals(text) || "/".equals(text)) { result.add(new PsiElementResolveResult(context)); } else if ("..".equals(text)) { final PsiFileSystemItem resolved = context.getParent(); if (resolved != null) { result.add(new PsiElementResolveResult(resolved)); } } else { final int separatorIndex = text.indexOf('/'); if (separatorIndex >= 0) { final List<ResolveResult> resolvedContexts = new ArrayList<ResolveResult>(); innerResolveInContext(text.substring(0, separatorIndex), context, resolvedContexts); final String restOfText = text.substring(separatorIndex + 1); for (ResolveResult contextVariant : resolvedContexts) { final PsiFileSystemItem item = (PsiFileSystemItem)contextVariant.getElement(); if (item != null) { innerResolveInContext(restOfText, item, result); } } } else { final String decoded = decode(text); if (decoded != null) { processVariants(context, new BaseScopeProcessor() { public boolean execute(final PsiElement element, final PsiSubstitutor substitutor) { final String name = ((PsiFileSystemItem)element).getName(); if (name != null) { if (myFileReferenceSet.isCaseSensitive() ? 
decoded.equals(name) : decoded.compareToIgnoreCase(name) == 0) { result.add(new PsiElementResolveResult(element)); return false; } } return true; } }); } } } } @Nullable private String decode(final String text) { if (myFileReferenceSet.isUrlEncoded()) { try { return new URI(text).getPath(); } catch (Exception e) { return text; } } return text; } public Object[] getVariants() { final String s = getText(); if (s != null && s.equals("/")) { return ArrayUtil.EMPTY_OBJECT_ARRAY; } try { final List ret = new ArrayList(); final List<Class> allowedClasses = new ArrayList<Class>(); allowedClasses.add(PsiFile.class); for (final FileReferenceHelper helper : getHelpers()) { allowedClasses.add(helper.getDirectoryClass()); } final PsiElementProcessor<PsiFileSystemItem> processor = createChildrenProcessor(myFileReferenceSet.createProcessor(ret, allowedClasses, RESOLVERS)); for (PsiFileSystemItem context : getContexts()) { for (final PsiElement child : context.getChildren()) { if (child instanceof PsiFileSystemItem) { processor.execute((PsiFileSystemItem)child); } } } final Object[] variants = ret.toArray(); if (myFileReferenceSet.isUrlEncoded()) { for (int i = 0; i < variants.length; i++) { if (variants[i] instanceof CandidateInfo && ((CandidateInfo)variants[i]).getElement() instanceof PsiNamedElement) { final PsiNamedElement psiElement = (PsiNamedElement)((CandidateInfo)variants[i]).getElement(); assert psiElement != null; String name = psiElement.getName(); final String encoded = encode(name); if (!encoded.equals(name)) { final Icon icon = psiElement.getIcon(Iconable.ICON_FLAG_READ_STATUS | Iconable.ICON_FLAG_VISIBILITY); final Object lookupValue = LookupValueFactory.createLookupValue(encoded, icon); variants[i] = lookupValue; } } } } return variants; } catch (ProcessorRegistry.IncompatibleReferenceTypeException e) { LOG.error(e); return ArrayUtil.EMPTY_OBJECT_ARRAY; } } private static String encode(final String name) { try { return new URI(null, null, name, null).toString(); } 
catch (Exception e) { return name; } } private void processVariants(final PsiFileSystemItem context, final PsiScopeProcessor processor) { context.processChildren(createChildrenProcessor(processor)); } private PsiElementProcessor<PsiFileSystemItem> createChildrenProcessor(final PsiScopeProcessor processor) { return new PsiElementProcessor<PsiFileSystemItem>() { public boolean execute(PsiFileSystemItem element) { final VirtualFile file = element.getVirtualFile(); if (file != null && !file.isDirectory()) { final PsiFile psiFile = getElement().getManager().findFile(file); if (psiFile != null) { element = psiFile; } } return processor.execute(element, PsiSubstitutor.EMPTY); } }; } @Nullable private FileReference getContextReference() { return myIndex > 0 ? myFileReferenceSet.getReference(myIndex - 1) : null; } public PsiElement getElement() { return myFileReferenceSet.getElement(); } public PsiFileSystemItem resolve() { ResolveResult[] resolveResults = multiResolve(false); return resolveResults.length == 1 ? 
(PsiFileSystemItem)resolveResults[0].getElement() : null; } public boolean isReferenceTo(PsiElement element) { if (!(element instanceof PsiFileSystemItem)) return false; final PsiFileSystemItem item = resolve(); return item != null && FileReferenceHelperRegistrar.areElementsEquivalent(item, (PsiFileSystemItem)element); } public TextRange getRangeInElement() { return myRange; } public String getCanonicalText() { return myText; } protected String getText() { return myText; } public boolean isSoft() { return myFileReferenceSet.isSoft(); } public PsiElement handleElementRename(String newElementName) throws IncorrectOperationException { final ElementManipulator<PsiElement> manipulator = GenericReference.getManipulator(getElement()); if (manipulator != null) { myFileReferenceSet.setElement(manipulator.handleContentChange(getElement(), getRangeInElement(), newElementName)); //Correct ranges int delta = newElementName.length() - myRange.getLength(); myRange = new TextRange(getRangeInElement().getStartOffset(), getRangeInElement().getStartOffset() + newElementName.length()); FileReference[] references = myFileReferenceSet.getAllReferences(); for (int idx = myIndex + 1; idx < references.length; idx++) { references[idx].myRange = references[idx].myRange.shiftRight(delta); } return myFileReferenceSet.getElement(); } throw new IncorrectOperationException("Manipulator for this element is not defined"); } public PsiElement bindToElement(@NotNull final PsiElement element) throws IncorrectOperationException { if (!(element instanceof PsiFileSystemItem)) throw new IncorrectOperationException("Cannot bind to element, should be instanceof PsiFileSystemItem: " + element); final PsiFileSystemItem fileSystemItem = (PsiFileSystemItem)element; VirtualFile dstVFile = fileSystemItem.getVirtualFile(); if (dstVFile == null) throw new IncorrectOperationException("Cannot bind to non-physical element:" + element); final PsiFile file = getElement().getContainingFile(); final VirtualFile curVFile = 
file.getVirtualFile(); if (curVFile == null) throw new IncorrectOperationException("Cannot bind from non-physical element:" + file); final Project project = element.getProject(); final String newName; if (myFileReferenceSet.isAbsolutePathReference()) { PsiFileSystemItem root = null; PsiFileSystemItem dstItem = null; for (final FileReferenceHelper helper : FileReferenceHelperRegistrar.getHelpers()) { PsiFileSystemItem _dstItem = helper.getPsiFileSystemItem(project, dstVFile); if (_dstItem != null) { PsiFileSystemItem _root = helper.findRoot(project, dstVFile); if (_root != null) { root = _root; dstItem = _dstItem; break; } } } if (root == null) return element; final String relativePath = PsiFileSystemItemUtil.getRelativePath(root, dstItem); if (relativePath == null) { return element; } newName = "/" + relativePath; } else { // relative path PsiFileSystemItem curItem = null; PsiFileSystemItem dstItem = null; for (final FileReferenceHelper helper : FileReferenceHelperRegistrar.getHelpers()) { PsiFileSystemItem _curItem = helper.getPsiFileSystemItem(project, curVFile); if (_curItem != null) { PsiFileSystemItem _dstItem = helper.getPsiFileSystemItem(project, dstVFile); if (_dstItem != null) { curItem = _curItem; dstItem = _dstItem; break; } } } checkNotNull(curItem, curVFile, dstVFile); assert curItem != null; if (curItem.equals(dstItem)) { return getElement(); } newName = PsiFileSystemItemUtil.getNotNullRelativePath(curItem, dstItem); } final TextRange range = new TextRange(myFileReferenceSet.getStartInElement(), getRangeInElement().getEndOffset()); final ElementManipulator<PsiElement> manipulator = GenericReference.getManipulator(getElement()); if (manipulator == null) { throw new IncorrectOperationException("Manipulator not defined for: " + getElement()); } return manipulator.handleContentChange(getElement(), range, newName); } private static void checkNotNull(final Object o, final VirtualFile curVFile, final VirtualFile dstVFile) throws IncorrectOperationException { 
if (o == null) { throw new IncorrectOperationException("Cannot find path between files; src = " + curVFile.getPresentableUrl() + "; dst = " + dstVFile.getPresentableUrl()); } } public void registerQuickfix(HighlightInfo info, FileReference reference) { for (final FileReferenceHelper helper : getHelpers()) { helper.registerFixes(info, reference); } } protected List<FileReferenceHelper> getHelpers() { return FileReferenceHelperRegistrar.getHelpers(); } public int getIndex() { return myIndex; } public String getUnresolvedMessagePattern() { final StringBuffer builder = new StringBuffer(JavaErrorMessages.message("error.cannot.resolve")); builder.append(" ").append(myFileReferenceSet.getTypeName()); if (!isLast()) { for (final FileReferenceHelper helper : getHelpers()) { builder.append(" ").append(JavaErrorMessages.message("error.cannot.resolve.infix")).append(" ") .append(helper.getDirectoryTypeName()); } } builder.append(" ''{0}''."); return builder.toString(); } public final boolean isLast() { return myIndex == myFileReferenceSet.getAllReferences().length - 1; } @NotNull public FileReferenceSet getFileReferenceSet() { return myFileReferenceSet; } public void clearResolveCaches() { final PsiManager manager = getElement().getManager(); if (manager instanceof PsiManagerImpl) { ((PsiManagerImpl)manager).getResolveCache().clearResolveCaches(this); } } public LocalQuickFix[] getQuickFixes() { final List<LocalQuickFix> result = new ArrayList<LocalQuickFix>(); for (final FileReferenceHelper<?> helper : getHelpers()) { result.addAll(helper.registerFixes(null, this)); } return result.toArray(new LocalQuickFix[result.size()]); } static class MyResolver implements ResolveCache.PolyVariantResolver<FileReference> { static MyResolver INSTANCE = new MyResolver(); public ResolveResult[] resolve(FileReference ref, boolean incompleteCode) { return ref.innerResolve(); } } }
source/com/intellij/psi/impl/source/resolve/reference/impl/providers/FileReference.java
package com.intellij.psi.impl.source.resolve.reference.impl.providers; import com.intellij.codeInsight.daemon.EmptyResolveMessageProvider; import com.intellij.codeInsight.daemon.JavaErrorMessages; import com.intellij.codeInsight.daemon.QuickFixProvider; import com.intellij.codeInsight.daemon.impl.HighlightInfo; import com.intellij.codeInsight.lookup.LookupValueFactory; import com.intellij.codeInspection.LocalQuickFix; import com.intellij.codeInspection.LocalQuickFixProvider; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Iconable; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.impl.PsiManagerImpl; import com.intellij.psi.impl.source.resolve.ResolveCache; import com.intellij.psi.impl.source.resolve.reference.ProcessorRegistry; import com.intellij.psi.impl.source.resolve.reference.impl.GenericReference; import com.intellij.psi.infos.CandidateInfo; import com.intellij.psi.scope.BaseScopeProcessor; import com.intellij.psi.scope.PsiConflictResolver; import com.intellij.psi.scope.PsiScopeProcessor; import com.intellij.psi.scope.conflictResolvers.DuplicateConflictResolver; import com.intellij.psi.search.PsiElementProcessor; import com.intellij.util.ArrayUtil; import com.intellij.util.IncorrectOperationException; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; /** * @author cdr */ public class FileReference implements PsiPolyVariantReference, QuickFixProvider<FileReference>, LocalQuickFixProvider, EmptyResolveMessageProvider { public static final FileReference[] EMPTY = new FileReference[0]; private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.resolve.reference.impl.providers.FileReference"); 
private final int myIndex; private TextRange myRange; private final String myText; @NotNull private final FileReferenceSet myFileReferenceSet; private static final List<PsiConflictResolver> RESOLVERS = Arrays.<PsiConflictResolver>asList(new DuplicateConflictResolver()); public FileReference(final @NotNull FileReferenceSet fileReferenceSet, TextRange range, int index, String text) { myFileReferenceSet = fileReferenceSet; myIndex = index; myRange = range; myText = text; } @NotNull private Collection<PsiFileSystemItem> getContexts() { final FileReference contextRef = getContextReference(); if (contextRef == null) { return myFileReferenceSet.getDefaultContexts(); } ResolveResult[] resolveResults = contextRef.multiResolve(false); ArrayList<PsiFileSystemItem> result = new ArrayList<PsiFileSystemItem>(); for (ResolveResult resolveResult : resolveResults) { result.add((PsiFileSystemItem)resolveResult.getElement()); } return result; } @NotNull public ResolveResult[] multiResolve(final boolean incompleteCode) { final PsiManager manager = getElement().getManager(); if (manager instanceof PsiManagerImpl) { return ((PsiManagerImpl)manager).getResolveCache().resolveWithCaching(this, MyResolver.INSTANCE, false, false); } return innerResolve(); } protected ResolveResult[] innerResolve() { final String referenceText = getText(); final Collection<PsiFileSystemItem> contexts = getContexts(); final Collection<ResolveResult> result = new ArrayList<ResolveResult>(contexts.size()); for (final PsiFileSystemItem context : contexts) { innerResolveInContext(referenceText, context, result); } final int resultCount = result.size(); return resultCount > 0 ? 
result.toArray(new ResolveResult[resultCount]) : ResolveResult.EMPTY_ARRAY; } private void innerResolveInContext(@NotNull final String text, @NotNull final PsiFileSystemItem context, final Collection<ResolveResult> result) { if (text.length() == 0 && !myFileReferenceSet.isEndingSlashNotAllowed() && isLast() || ".".equals(text) || "/".equals(text)) { result.add(new PsiElementResolveResult(context)); } else if ("..".equals(text)) { final PsiFileSystemItem resolved = context.getParent(); if (resolved != null) { result.add(new PsiElementResolveResult(resolved)); } } else { final int separatorIndex = text.indexOf('/'); if (separatorIndex >= 0) { final List<ResolveResult> resolvedContexts = new ArrayList<ResolveResult>(); innerResolveInContext(text.substring(0, separatorIndex), context, resolvedContexts); final String restOfText = text.substring(separatorIndex + 1); for (ResolveResult contextVariant : resolvedContexts) { final PsiFileSystemItem item = (PsiFileSystemItem)contextVariant.getElement(); if (item != null) { innerResolveInContext(restOfText, item, result); } } } else { final String decoded = decode(text); if (decoded != null) { processVariants(context, new BaseScopeProcessor() { public boolean execute(final PsiElement element, final PsiSubstitutor substitutor) { final String name = ((PsiFileSystemItem)element).getName(); if (name != null) { if (myFileReferenceSet.isCaseSensitive() ? 
decoded.equals(name) : decoded.compareToIgnoreCase(name) == 0) { result.add(new PsiElementResolveResult(element)); return false; } } return true; } }); } } } } @Nullable private String decode(final String text) { if (myFileReferenceSet.isUrlEncoded()) { try { return new URI(text).getPath(); } catch (Exception e) { return text; } } return text; } public Object[] getVariants() { final String s = getText(); if (s != null && s.equals("/")) { return ArrayUtil.EMPTY_OBJECT_ARRAY; } try { final List ret = new ArrayList(); final List<Class> allowedClasses = new ArrayList<Class>(); allowedClasses.add(PsiFile.class); for (final FileReferenceHelper helper : getHelpers()) { allowedClasses.add(helper.getDirectoryClass()); } final PsiElementProcessor<PsiFileSystemItem> processor = createChildrenProcessor(myFileReferenceSet.createProcessor(ret, allowedClasses, RESOLVERS)); for (PsiFileSystemItem context : getContexts()) { for (final PsiElement child : context.getChildren()) { if (child instanceof PsiFileSystemItem) { processor.execute((PsiFileSystemItem)child); } } } final Object[] variants = ret.toArray(); if (myFileReferenceSet.isUrlEncoded()) { for (int i = 0; i < variants.length; i++) { if (variants[i] instanceof CandidateInfo && ((CandidateInfo)variants[i]).getElement() instanceof PsiNamedElement) { final PsiNamedElement psiElement = (PsiNamedElement)((CandidateInfo)variants[i]).getElement(); assert psiElement != null; String name = psiElement.getName(); final String encoded = encode(name); if (!encoded.equals(name)) { final Icon icon = psiElement.getIcon(Iconable.ICON_FLAG_READ_STATUS | Iconable.ICON_FLAG_VISIBILITY); final Object lookupValue = LookupValueFactory.createLookupValue(encoded, icon); variants[i] = lookupValue; } } } } return variants; } catch (ProcessorRegistry.IncompatibleReferenceTypeException e) { LOG.error(e); return ArrayUtil.EMPTY_OBJECT_ARRAY; } } private static String encode(final String name) { try { return new URI(null, null, name, null).toString(); } 
catch (Exception e) { return name; } } private void processVariants(final PsiFileSystemItem context, final PsiScopeProcessor processor) { context.processChildren(createChildrenProcessor(processor)); } private PsiElementProcessor<PsiFileSystemItem> createChildrenProcessor(final PsiScopeProcessor processor) { return new PsiElementProcessor<PsiFileSystemItem>() { public boolean execute(PsiFileSystemItem element) { final VirtualFile file = element.getVirtualFile(); if (file != null && !file.isDirectory()) { final PsiFile psiFile = getElement().getManager().findFile(file); if (psiFile != null) { element = psiFile; } } return processor.execute(element, PsiSubstitutor.EMPTY); } }; } @Nullable private FileReference getContextReference() { return myIndex > 0 ? myFileReferenceSet.getReference(myIndex - 1) : null; } public PsiElement getElement() { return myFileReferenceSet.getElement(); } public PsiFileSystemItem resolve() { ResolveResult[] resolveResults = multiResolve(false); return resolveResults.length == 1 ? 
(PsiFileSystemItem)resolveResults[0].getElement() : null; } public boolean isReferenceTo(PsiElement element) { if (!(element instanceof PsiFileSystemItem)) return false; final PsiFileSystemItem item = resolve(); return item != null && FileReferenceHelperRegistrar.areElementsEquivalent(item, (PsiFileSystemItem)element); } public TextRange getRangeInElement() { return myRange; } public String getCanonicalText() { return myText; } protected String getText() { return myText; } public boolean isSoft() { return myFileReferenceSet.isSoft(); } public PsiElement handleElementRename(String newElementName) throws IncorrectOperationException { final ElementManipulator<PsiElement> manipulator = GenericReference.getManipulator(getElement()); if (manipulator != null) { myFileReferenceSet.setElement(manipulator.handleContentChange(getElement(), getRangeInElement(), newElementName)); //Correct ranges int delta = newElementName.length() - myRange.getLength(); myRange = new TextRange(getRangeInElement().getStartOffset(), getRangeInElement().getStartOffset() + newElementName.length()); FileReference[] references = myFileReferenceSet.getAllReferences(); for (int idx = myIndex + 1; idx < references.length; idx++) { references[idx].myRange = references[idx].myRange.shiftRight(delta); } return myFileReferenceSet.getElement(); } throw new IncorrectOperationException("Manipulator for this element is not defined"); } public PsiElement bindToElement(@NotNull final PsiElement element) throws IncorrectOperationException { if (!(element instanceof PsiFileSystemItem)) throw new IncorrectOperationException("Cannot bind to element, should be instanceof PsiFileSystemItem: " + element); final PsiFileSystemItem fileSystemItem = (PsiFileSystemItem)element; VirtualFile dstVFile = fileSystemItem.getVirtualFile(); if (dstVFile == null) throw new IncorrectOperationException("Cannot bind to non-physical element:" + element); final PsiFile file = getElement().getContainingFile(); final VirtualFile curVFile = 
file.getVirtualFile(); if (curVFile == null) throw new IncorrectOperationException("Cannot bind from non-physical element:" + file); final Project project = element.getProject(); final String newName; if (myFileReferenceSet.isAbsolutePathReference()) { PsiFileSystemItem root = null; PsiFileSystemItem dstItem = null; for (final FileReferenceHelper helper : FileReferenceHelperRegistrar.getHelpers()) { PsiFileSystemItem _dstItem = helper.getPsiFileSystemItem(project, dstVFile); if (_dstItem != null) { PsiFileSystemItem _root = helper.findRoot(project, dstVFile); if (_root != null) { root = _root; dstItem = _dstItem; break; } } } if (root == null) return element; newName = "/" + PsiFileSystemItemUtil.getNotNullRelativePath(root, dstItem); } else { // relative path PsiFileSystemItem curItem = null; PsiFileSystemItem dstItem = null; for (final FileReferenceHelper helper : FileReferenceHelperRegistrar.getHelpers()) { PsiFileSystemItem _curItem = helper.getPsiFileSystemItem(project, curVFile); if (_curItem != null) { PsiFileSystemItem _dstItem = helper.getPsiFileSystemItem(project, dstVFile); if (_dstItem != null) { curItem = _curItem; dstItem = _dstItem; break; } } } checkNotNull(curItem, curVFile, dstVFile); assert curItem != null; if (curItem.equals(dstItem)) { return getElement(); } newName = PsiFileSystemItemUtil.getNotNullRelativePath(curItem, dstItem); } final TextRange range = new TextRange(myFileReferenceSet.getStartInElement(), getRangeInElement().getEndOffset()); final ElementManipulator<PsiElement> manipulator = GenericReference.getManipulator(getElement()); if (manipulator == null) { throw new IncorrectOperationException("Manipulator not defined for: " + getElement()); } return manipulator.handleContentChange(getElement(), range, newName); } private static void checkNotNull(final Object o, final VirtualFile curVFile, final VirtualFile dstVFile) throws IncorrectOperationException { if (o == null) { throw new IncorrectOperationException("Cannot find path between 
files; src = " + curVFile.getPresentableUrl() + "; dst = " + dstVFile.getPresentableUrl()); } } public void registerQuickfix(HighlightInfo info, FileReference reference) { for (final FileReferenceHelper helper : getHelpers()) { helper.registerFixes(info, reference); } } protected List<FileReferenceHelper> getHelpers() { return FileReferenceHelperRegistrar.getHelpers(); } public int getIndex() { return myIndex; } public String getUnresolvedMessagePattern() { final StringBuffer builder = new StringBuffer(JavaErrorMessages.message("error.cannot.resolve")); builder.append(" ").append(myFileReferenceSet.getTypeName()); if (!isLast()) { for (final FileReferenceHelper helper : getHelpers()) { builder.append(" ").append(JavaErrorMessages.message("error.cannot.resolve.infix")).append(" ") .append(helper.getDirectoryTypeName()); } } builder.append(" ''{0}''."); return builder.toString(); } public final boolean isLast() { return myIndex == myFileReferenceSet.getAllReferences().length - 1; } @NotNull public FileReferenceSet getFileReferenceSet() { return myFileReferenceSet; } public void clearResolveCaches() { final PsiManager manager = getElement().getManager(); if (manager instanceof PsiManagerImpl) { ((PsiManagerImpl)manager).getResolveCache().clearResolveCaches(this); } } public LocalQuickFix[] getQuickFixes() { final List<LocalQuickFix> result = new ArrayList<LocalQuickFix>(); for (final FileReferenceHelper<?> helper : getHelpers()) { result.addAll(helper.registerFixes(null, this)); } return result.toArray(new LocalQuickFix[result.size()]); } static class MyResolver implements ResolveCache.PolyVariantResolver<FileReference> { static MyResolver INSTANCE = new MyResolver(); public ResolveResult[] resolve(FileReference ref, boolean incompleteCode) { return ref.innerResolve(); } } }
IDEADEV-18796
source/com/intellij/psi/impl/source/resolve/reference/impl/providers/FileReference.java
IDEADEV-18796
<ide><path>ource/com/intellij/psi/impl/source/resolve/reference/impl/providers/FileReference.java <ide> } <ide> } <ide> if (root == null) return element; <del> newName = "/" + PsiFileSystemItemUtil.getNotNullRelativePath(root, dstItem); <add> final String relativePath = PsiFileSystemItemUtil.getRelativePath(root, dstItem); <add> if (relativePath == null) { <add> return element; <add> } <add> newName = "/" + relativePath; <ide> <ide> } else { // relative path <ide> PsiFileSystemItem curItem = null;
JavaScript
mit
ee55b42d8358d671da7a189d121e91d2ffa10ac8
0
participedia/api,participedia/api,participedia/api,participedia/api
"use strict"; // deploy on heroku-18 stack const path = require("path"); const process = require("process"); require("dotenv").config({ silent: process.env.NODE_ENV === "production" }); const express = require("express"); const app = express(); const exphbs = require("express-handlebars"); const fs = require("fs"); const cookieParser = require("cookie-parser"); const session = require("express-session"); const passport = require("passport"); const Auth0Strategy = require("passport-auth0"); const i18n = require("i18n"); const apicache = require("apicache"); const compression = require("compression"); const errorhandler = require("errorhandler"); const morgan = require("morgan"); const bodyParser = require("body-parser"); const methodOverride = require("method-override"); const Sentry = require("@sentry/node"); // only instantiate sentry logging if we are on staging or prod if ( process.env.NODE_ENV === "production" || process.env.NODE_ENV === "staging" ) { Sentry.init({ dsn: process.env.SENTRY_DSN, environment: process.env.NODE_ENV, }); // The request handler must be the first middleware on the app app.use(Sentry.Handlers.requestHandler()); } // other logging middlewear app.use(morgan("dev")); // request logging // Actual Participedia APIS vs. 
Nodejs gunk const handlebarsHelpers = require("./api/helpers/handlebars-helpers.js"); const { collection_ } = require("./api/controllers/collection"); const { case_ } = require("./api/controllers/case"); const { method } = require("./api/controllers/method"); const { organization } = require("./api/controllers/organization"); const bookmark = require("./api/controllers/bookmark"); const search = require("./api/controllers/search"); const list = require("./api/controllers/list"); const user = require("./api/controllers/user"); const { getUserOrCreateUser } = require("./api/helpers/user.js"); const oldDotNetUrlHandler = require("./api/helpers/old-dot-net-url-handler.js"); const { SUPPORTED_LANGUAGES } = require("./constants.js"); const logError = require("./api/helpers/log-error.js"); const port = process.env.PORT || 3001; app.use(errorhandler()); // canonicalize url app.use((req, res, next) => { if ( process.env.NODE_ENV === "production" && req.hostname !== "participedia.net" && !res.headersSent ) { res.redirect("https://participedia.net" + req.originalUrl); } else { next(); } }); // CONFIGS app.use(compression()); app.set("port", port); app.use(express.static("public", { index: false })); app.use(methodOverride()); // Do we actually use/need this? 
app.use(bodyParser.json({ limit: "50mb" })); app.use(bodyParser.urlencoded({ limit: "50mb", extended: true })); app.use(cookieParser()); i18n.configure({ locales: SUPPORTED_LANGUAGES.map(locale => locale.twoLetterCode), cookie: "locale", extension: ".js", directory: "./locales", updateFiles: false, }); app.use((req, res, next) => { // set english as the default locale, if it's not already set if (!req.cookies.locale) { res.cookie("locale", "en", { path: "/" }); } next(); }); app.use(i18n.init); // config express-session const sess = { store: new (require("connect-pg-simple")(session))(), secret: process.env.COOKIE_SECRET, cookie: { maxAge: 30 * 24 * 60 * 60 * 1000 }, // 30 days resave: false, saveUninitialized: true, }; if (app.get("env") === "production") { sess.cookie.secure = true; // serve secure cookies, requires https } app.use(session(sess)); app.set("trust proxy", 1); // Configure Passport to use Auth0 const strategy = new Auth0Strategy( { domain: process.env.AUTH0_DOMAIN, clientID: process.env.AUTH0_CLIENT_ID, clientSecret: process.env.AUTH0_CLIENT_SECRET, callbackURL: process.env.CALLBACK_URL || "/redirect", }, function(accessToken, refreshToken, extraParams, profile, done) { // accessToken is the token to call Auth0 API (not needed in the most cases) // extraParams.id_token has the JSON Web Token // profile has all the information from the user return done(null, profile); } ); passport.use(strategy); app.use(passport.initialize()); app.use(passport.session()); // You can use this section to keep a smaller payload passport.serializeUser(function(user, done) { done(null, user); }); passport.deserializeUser(async function(user, done) { // get db user from auth0 user data const dbUser = await getUserOrCreateUser(user._json); done(null, dbUser); }); // Perform the login, after login Auth0 will redirect to callback app.get("/login", function(req, res, next) { // set returnTo session var to referer so user is redirected to current page after login 
req.session.returnTo = req.headers.referer; req.session.refreshAndClose = req.query.refreshAndClose; passport.authenticate( "auth0", { scope: "offline openid email profile", }, () => {} )(req, res, next); }); // Perform the final stage of authentication and redirect to previously requested URL or '/user' app.get("/redirect", function(req, res, next) { passport.authenticate("auth0", function(err, user, info) { if (err) { return next(err); } if (!user) { // return res.redirect("/login"); return res.redirect("/"); } req.logIn(user, function(err) { if (err) { return next(err); } let returnToUrl = req.session.returnTo; const refreshAndClose = req.session.refreshAndClose; delete req.session.returnTo; delete req.session.refreshAndClose; if (refreshAndClose === "true") { returnToUrl = returnToUrl + "?refreshAndClose=true"; } res.redirect(returnToUrl || "/"); }); })(req, res, next); }); // Perform session logout and redirect to homepage app.get("/logout", (req, res) => { const currentUrl = `${req.protocol}://${req.headers.host}`; req.logout(); res.redirect( `https://${process.env.AUTH0_DOMAIN}/v2/logout?returnTo=${currentUrl}` ); }); const cache = apicache.middleware; apicache.options({ debug: true, enabled: false, successCodes: [200, 201], }); // TODO Invalidate apicache on PUT/POST/DELETE using apicache.clear(req.params.collection); const hbs = exphbs.create({ // Specify helpers which are only registered on this instance. 
defaultLayout: "main", extname: ".html", helpers: handlebarsHelpers, }); app.engine(".html", hbs.engine); app.set("view engine", ".html"); // make data available as local vars in templates app.use((req, res, next) => { const gaTrackingIdByEnv = { production: process.env.GOOGLE_TRACKING_ID_PROD, staging: process.env.GOOGLE_TRACKING_ID_STAGE, development: process.env.GOOGLE_TRACKING_ID_DEV, }; res.locals.req = req; res.locals.GA_TRACKING_ID = gaTrackingIdByEnv[process.env.NODE_ENV]; next(); }); // ROUTES app.use("/", cache("5 minutes"), search); app.use("/collection", collection_); app.use("/case", case_); app.use("/organization", organization); app.use("/method", method); app.use("/list", list); app.use("/user", user); app.use("/bookmark", bookmark); // endpoint to set new locale app.get("/set-locale", function(req, res) { const locale = req.query && req.query.locale; const redirectTo = req.query && req.query.redirectTo; if (locale) { res.cookie("locale", locale, { path: "/" }); } return res.redirect(redirectTo || "/"); }); app.get("/about", function(req, res) { res.status(200).render("about-view"); }); app.get("/legal", function(req, res) { res.status(200).render("legal-view"); }); app.get("/research", function(req, res) { res.status(200).render("research-view"); }); app.get("/teaching", function(req, res) { res.status(200).render("teaching-view"); }); app.get("/content-chooser", function(req, res) { res.status(200).render("content-chooser"); }); app.get("/help-faq-contact", function(req, res) { res.status(200).render("help-faq-contact-view"); }); // redirect old user profile for tanyapuravankara to new url // we are only doing it for this user account, since it gets hits on google app.get("/en/people/tanyapuravankara", function(req, res) { return res.redirect("/user/8198"); }); // /citizensvoicescovid // vanity url for covid related intiative, redirects to a google site page app.get("/citizensvoicescovid", function(req, res) { return 
res.redirect("https://sites.google.com/participedia.net/citizensvoicescovid"); }); // redirect old .net urls to their new urls app.use((req, res, next) => { const path = req.originalUrl; if (oldDotNetUrlHandler.hasMatch(path)) { // redirect old .net urls to new urls return res.redirect(oldDotNetUrlHandler.getNewUrl(path)); } next(); }); app.get("/robots.txt", function(req, res, next) { // send different robots.txt files for different environments if ( process.env.NODE_ENV === "staging" || process.env.NODE_ENV === "production" ) { return res .status(200) .sendFile(`${process.env.PWD}/public/robots-${process.env.NODE_ENV}.txt`); } next(); }); // 404 error handling // this should always be after all routes to catch all invalid urls app.use((req, res, next) => { res.status(404).render("404"); }); // The error handler must be before any other logging middleware and after all controllers app.use(Sentry.Handlers.errorHandler()); if (process.env.NODE_ENV === "development") { // only use in development app.use(errorhandler()); } // Better logging of "unhandled" promise exceptions process.on("unhandledRejection", function(reason, p) { logError(`Possibly Unhandled Rejection at: Promise for reason ${reason}`); }); module.exports = app;
app.js
"use strict"; // deploy on heroku-18 stack const path = require("path"); const process = require("process"); require("dotenv").config({ silent: process.env.NODE_ENV === "production" }); const express = require("express"); const app = express(); const exphbs = require("express-handlebars"); const fs = require("fs"); const cookieParser = require("cookie-parser"); const session = require("express-session"); const passport = require("passport"); const Auth0Strategy = require("passport-auth0"); const i18n = require("i18n"); const apicache = require("apicache"); const compression = require("compression"); const errorhandler = require("errorhandler"); const morgan = require("morgan"); const bodyParser = require("body-parser"); const methodOverride = require("method-override"); const Sentry = require("@sentry/node"); // only instantiate sentry logging if we are on staging or prod if ( process.env.NODE_ENV === "production" || process.env.NODE_ENV === "staging" ) { Sentry.init({ dsn: process.env.SENTRY_DSN, environment: process.env.NODE_ENV, }); // The request handler must be the first middleware on the app app.use(Sentry.Handlers.requestHandler()); } // other logging middlewear app.use(morgan("dev")); // request logging // Actual Participedia APIS vs. 
Nodejs gunk const handlebarsHelpers = require("./api/helpers/handlebars-helpers.js"); const { collection_ } = require("./api/controllers/collection"); const { case_ } = require("./api/controllers/case"); const { method } = require("./api/controllers/method"); const { organization } = require("./api/controllers/organization"); const bookmark = require("./api/controllers/bookmark"); const search = require("./api/controllers/search"); const list = require("./api/controllers/list"); const user = require("./api/controllers/user"); const { getUserOrCreateUser } = require("./api/helpers/user.js"); const oldDotNetUrlHandler = require("./api/helpers/old-dot-net-url-handler.js"); const { SUPPORTED_LANGUAGES } = require("./constants.js"); const logError = require("./api/helpers/log-error.js"); const port = process.env.PORT || 3001; app.use(errorhandler()); // canonicalize url app.use((req, res, next) => { if ( process.env.NODE_ENV === "production" && req.hostname !== "participedia.net" && !res.headersSent ) { res.redirect("https://participedia.net" + req.originalUrl); } else { next(); } }); // CONFIGS app.use(compression()); app.set("port", port); app.use(express.static("public", { index: false })); app.use(methodOverride()); // Do we actually use/need this? 
app.use(bodyParser.json({ limit: "50mb" })); app.use(bodyParser.urlencoded({ limit: "50mb", extended: true })); app.use(cookieParser()); i18n.configure({ locales: SUPPORTED_LANGUAGES.map(locale => locale.twoLetterCode), cookie: "locale", extension: ".js", directory: "./locales", updateFiles: false, }); app.use((req, res, next) => { // set english as the default locale, if it's not already set if (!req.cookies.locale) { res.cookie("locale", "en", { path: "/" }); } next(); }); app.use(i18n.init); // config express-session const sess = { store: new (require("connect-pg-simple")(session))(), secret: process.env.COOKIE_SECRET, cookie: { maxAge: 30 * 24 * 60 * 60 * 1000 }, // 30 days resave: false, saveUninitialized: true, }; if (app.get("env") === "production") { sess.cookie.secure = true; // serve secure cookies, requires https } app.use(session(sess)); app.set("trust proxy", 1); // Configure Passport to use Auth0 const strategy = new Auth0Strategy( { domain: process.env.AUTH0_DOMAIN, clientID: process.env.AUTH0_CLIENT_ID, clientSecret: process.env.AUTH0_CLIENT_SECRET, callbackURL: process.env.CALLBACK_URL || "/redirect", }, function(accessToken, refreshToken, extraParams, profile, done) { // accessToken is the token to call Auth0 API (not needed in the most cases) // extraParams.id_token has the JSON Web Token // profile has all the information from the user return done(null, profile); } ); passport.use(strategy); app.use(passport.initialize()); app.use(passport.session()); // You can use this section to keep a smaller payload passport.serializeUser(function(user, done) { done(null, user); }); passport.deserializeUser(async function(user, done) { // get db user from auth0 user data const dbUser = await getUserOrCreateUser(user._json); done(null, dbUser); }); // Perform the login, after login Auth0 will redirect to callback app.get("/login", function(req, res, next) { // set returnTo session var to referer so user is redirected to current page after login 
req.session.returnTo = req.headers.referer; req.session.refreshAndClose = req.query.refreshAndClose; passport.authenticate( "auth0", { scope: "offline openid email profile", }, () => {} )(req, res, next); }); // Perform the final stage of authentication and redirect to previously requested URL or '/user' app.get("/redirect", function(req, res, next) { passport.authenticate("auth0", function(err, user, info) { if (err) { return next(err); } if (!user) { // return res.redirect("/login"); return res.redirect("/"); } req.logIn(user, function(err) { if (err) { return next(err); } let returnToUrl = req.session.returnTo; const refreshAndClose = req.session.refreshAndClose; delete req.session.returnTo; delete req.session.refreshAndClose; if (refreshAndClose === "true") { returnToUrl = returnToUrl + "?refreshAndClose=true"; } res.redirect(returnToUrl || "/"); }); })(req, res, next); }); // Perform session logout and redirect to homepage app.get("/logout", (req, res) => { const currentUrl = `${req.protocol}://${req.headers.host}`; req.logout(); res.redirect( `https://${process.env.AUTH0_DOMAIN}/v2/logout?returnTo=${currentUrl}` ); }); const cache = apicache.middleware; apicache.options({ debug: true, enabled: false, successCodes: [200, 201], }); // TODO Invalidate apicache on PUT/POST/DELETE using apicache.clear(req.params.collection); const hbs = exphbs.create({ // Specify helpers which are only registered on this instance. 
defaultLayout: "main", extname: ".html", helpers: handlebarsHelpers, }); app.engine(".html", hbs.engine); app.set("view engine", ".html"); // make data available as local vars in templates app.use((req, res, next) => { const gaTrackingIdByEnv = { production: process.env.GOOGLE_TRACKING_ID_PROD, staging: process.env.GOOGLE_TRACKING_ID_STAGE, development: process.env.GOOGLE_TRACKING_ID_DEV, }; res.locals.req = req; res.locals.GA_TRACKING_ID = gaTrackingIdByEnv[process.env.NODE_ENV]; next(); }); // ROUTES app.use("/", cache("5 minutes"), search); app.use("/collection", collection_); app.use("/case", case_); app.use("/organization", organization); app.use("/method", method); app.use("/list", list); app.use("/user", user); app.use("/bookmark", bookmark); // endpoint to set new locale app.get("/set-locale", function(req, res) { const locale = req.query && req.query.locale; const redirectTo = req.query && req.query.redirectTo; if (locale) { res.cookie("locale", locale, { path: "/" }); } return res.redirect(redirectTo || "/"); }); app.get("/about", function(req, res) { res.status(200).render("about-view"); }); app.get("/legal", function(req, res) { res.status(200).render("legal-view"); }); app.get("/research", function(req, res) { res.status(200).render("research-view"); }); app.get("/teaching", function(req, res) { res.status(200).render("teaching-view"); }); app.get("/content-chooser", function(req, res) { res.status(200).render("content-chooser"); }); app.get("/help-faq-contact", function(req, res) { res.status(200).render("help-faq-contact-view"); }); // redirect old user profile for tanyapuravankara to new url // we are only doing it for this user account, since it gets hits on google app.get("/en/people/tanyapuravankara", function(req, res) { return res.redirect("/user/8198"); }); // /citizensvoicescovid // vanity url for covid related intiative, redirects to a google site page app.get("/citizensvoicescovid", function(req, res) { // TODO: replace with correct google 
site page url return res.redirect("https://participedia.net"); }); // redirect old .net urls to their new urls app.use((req, res, next) => { const path = req.originalUrl; if (oldDotNetUrlHandler.hasMatch(path)) { // redirect old .net urls to new urls return res.redirect(oldDotNetUrlHandler.getNewUrl(path)); } next(); }); app.get("/robots.txt", function(req, res, next) { // send different robots.txt files for different environments if ( process.env.NODE_ENV === "staging" || process.env.NODE_ENV === "production" ) { return res .status(200) .sendFile(`${process.env.PWD}/public/robots-${process.env.NODE_ENV}.txt`); } next(); }); // 404 error handling // this should always be after all routes to catch all invalid urls app.use((req, res, next) => { res.status(404).render("404"); }); // The error handler must be before any other logging middleware and after all controllers app.use(Sentry.Handlers.errorHandler()); if (process.env.NODE_ENV === "development") { // only use in development app.use(errorhandler()); } // Better logging of "unhandled" promise exceptions process.on("unhandledRejection", function(reason, p) { logError(`Possibly Unhandled Rejection at: Promise for reason ${reason}`); }); module.exports = app;
update to new site url
app.js
update to new site url
<ide><path>pp.js <ide> // /citizensvoicescovid <ide> // vanity url for covid related intiative, redirects to a google site page <ide> app.get("/citizensvoicescovid", function(req, res) { <del> // TODO: replace with correct google site page url <del> return res.redirect("https://participedia.net"); <add> return res.redirect("https://sites.google.com/participedia.net/citizensvoicescovid"); <ide> }); <ide> <ide> // redirect old .net urls to their new urls
Java
apache-2.0
a5c3d403580ad81e0fe75e360c9e6ad93c60496b
0
mbudiu-vmw/hiero,lalithsuresh/hiero,uwieder/hiero,mbudiu-vmw/hiero,lalithsuresh/hiero,mbudiu-vmw/hiero,lalithsuresh/hiero,uwieder/hiero,lalithsuresh/hiero,mbudiu-vmw/hiero,uwieder/hiero,mbudiu-vmw/hiero,uwieder/hiero,uwieder/hiero,lalithsuresh/hiero
package org.hiero.sketch.table.api; import org.checkerframework.checker.nullness.qual.NonNull; import org.hiero.utils.Randomness; import org.hiero.sketch.table.SparseMembership; import org.hiero.utils.IntSet; /** * A IMembershipSet is a representation of a set of integers. * These integers represent row indexes in a table. If an integer * is in an IMembershipSet, then it is present in the table. */ public interface IMembershipSet extends IRowOrder { /** * @param rowIndex A non-negative row index. * @return True if the given rowIndex is a member of the set. */ boolean isMember(int rowIndex); /** * @return an IMembershipSet containing k samples from the membership map. The samples are made * without replacement. Returns the full set if its size is smaller than k. There is no guarantee that * two subsequent samples return the same sample set. */ IMembershipSet sample(int k); /** * @return an IMembershipSet containing k samples from the membership map. The samples are made * without replacement. Returns the full set if its size is smaller than k. The pseudo-random * generator is seeded with parameter seed. */ IMembershipSet sample(int k, long seed); /** * @return a sample of size (rate * rowCount). randomizes between the floor and ceiling of this expression. */ default IMembershipSet sample(double rate) { return this.sample(this.getSampleSize(rate, 0, false)); } /** * @return same as sample(double rate) but with the seed for randomness specified by the caller. */ default IMembershipSet sample(double rate, long seed) { return this.sample(this.getSampleSize(rate, seed, true), seed); } /** * @return a new map which is the union of current map and otherMap. 
*/ IMembershipSet union(@NonNull IMembershipSet otherMap); IMembershipSet intersection(@NonNull IMembershipSet otherMap); default IMembershipSet setMinus(@NonNull IMembershipSet otherMap) { final IntSet setMinusSet = new IntSet(); final IRowIterator iter = this.getIterator(); int curr = iter.getNextRow(); while (curr >= 0) { if (!otherMap.isMember(curr)) setMinusSet.add(curr); curr = iter.getNextRow(); } return new SparseMembership(setMinusSet); } default int getSampleSize(double rate, long seed, boolean useSeed) { Randomness r = Randomness.getInstance(); if (useSeed) r.setSeed(seed); final int sampleSize; final double appSampleSize = rate * this.getSize(); if (r.nextDouble() < (appSampleSize - Math.floor(appSampleSize))) sampleSize = (int) Math.floor(appSampleSize); else sampleSize = (int) Math.ceil(appSampleSize); return sampleSize; } }
hieroplatform/src/main/java/org/hiero/sketch/table/api/IMembershipSet.java
package org.hiero.sketch.table.api; import org.checkerframework.checker.nullness.qual.NonNull; import org.hiero.utils.Randomness; import org.hiero.sketch.table.SparseMembership; import org.hiero.utils.IntSet; import java.util.Random; /** * A IMembershipSet is a representation of a set of integers. * These integers represent row indexes in a table. If an integer * is in an IMembershipSet, then it is present in the table. */ public interface IMembershipSet extends IRowOrder { /** * @param rowIndex A non-negative row index. * @return True if the given rowIndex is a member of the set. */ boolean isMember(int rowIndex); /** * @return an IMembershipSet containing k samples from the membership map. The samples are made * without replacement. Returns the full set if its size is smaller than k. There is no guarantee that * two subsequent samples return the same sample set. */ IMembershipSet sample(int k); /** * @return an IMembershipSet containing k samples from the membership map. The samples are made * without replacement. Returns the full set if its size is smaller than k. The pseudo-random * generator is seeded with parameter seed. */ IMembershipSet sample(int k, long seed); /** * @return a sample of size (rate * rowCount). randomizes between the floor and ceiling of this expression. */ default IMembershipSet sample(double rate) { return this.sample(this.getSampleSize(rate, 0, false)); } /** * @return same as sample(double rate) but with the seed for randomness specified by the caller. */ default IMembershipSet sample(double rate, long seed) { return this.sample(this.getSampleSize(rate, seed, true), seed); } /** * @return a new map which is the union of current map and otherMap. 
*/ IMembershipSet union(@NonNull IMembershipSet otherMap); IMembershipSet intersection(@NonNull IMembershipSet otherMap); default IMembershipSet setMinus(@NonNull IMembershipSet otherMap) { final IntSet setMinusSet = new IntSet(); final IRowIterator iter = this.getIterator(); int curr = iter.getNextRow(); while (curr >= 0) { if (!otherMap.isMember(curr)) setMinusSet.add(curr); curr = iter.getNextRow(); } return new SparseMembership(setMinusSet); } default int getSampleSize(double rate, long seed, boolean useSeed) { Randomness r = Randomness.getInstance(); if (useSeed) r.setSeed(seed); final int sampleSize; final double appSampleSize = rate * this.getSize(); if (r.nextDouble() < (appSampleSize - Math.floor(appSampleSize))) sampleSize = (int) Math.floor(appSampleSize); else sampleSize = (int) Math.ceil(appSampleSize); return sampleSize; } }
remove import
hieroplatform/src/main/java/org/hiero/sketch/table/api/IMembershipSet.java
remove import
<ide><path>ieroplatform/src/main/java/org/hiero/sketch/table/api/IMembershipSet.java <ide> import org.hiero.utils.Randomness; <ide> import org.hiero.sketch.table.SparseMembership; <ide> import org.hiero.utils.IntSet; <del> <del> <del>import java.util.Random; <del> <ide> <ide> /** <ide> * A IMembershipSet is a representation of a set of integers.
Java
apache-2.0
d8b73c02fbbe025060a8335c073b78c1a72a4b05
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.python.psi.impl; import com.google.common.collect.Maps; import com.intellij.ProjectTopics; import com.intellij.injected.editor.VirtualFileWindow; import com.intellij.notebook.editor.BackedVirtualFile; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.fileTypes.FileTypeRegistry; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleManager; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.project.DumbModeTask; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.roots.*; import com.intellij.openapi.roots.impl.FilePropertyPusher; import com.intellij.openapi.roots.impl.PushedFilePropertiesUpdater; import com.intellij.openapi.util.Key; import com.intellij.openapi.vfs.VfsUtilCore; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileVisitor; import com.intellij.openapi.vfs.newvfs.FileAttribute; import com.intellij.psi.SingleRootFileViewProvider; import com.intellij.testFramework.LightVirtualFile; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.indexing.IndexingBundle; import com.intellij.util.io.DataInputOutputUtil; import com.intellij.util.messages.MessageBus; import com.jetbrains.python.PythonCodeStyleService; import com.jetbrains.python.PythonFileType; import com.jetbrains.python.PythonRuntimeService; import com.jetbrains.python.codeInsight.typing.PyTypeShed; import com.jetbrains.python.module.PyModuleService; import com.jetbrains.python.psi.LanguageLevel; import 
com.jetbrains.python.psi.resolve.PythonSdkPathCache; import com.jetbrains.python.sdk.PythonSdkUtil; import one.util.streamex.StreamEx; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.util.*; import java.util.stream.Collectors; /** * @author yole */ public class PythonLanguageLevelPusher implements FilePropertyPusher<String> { private static final Key<String> KEY = new Key<>("python.language.level"); /* It so happens that no single language level is compatible with more than one other. So a map suffices for representation*/ private static final Map<LanguageLevel, LanguageLevel> COMPATIBLE_LEVELS; static { Map<LanguageLevel, LanguageLevel> compatLevels = Maps.newEnumMap(LanguageLevel.class); addCompatiblePair(compatLevels, LanguageLevel.PYTHON26, LanguageLevel.PYTHON27); addCompatiblePair(compatLevels, LanguageLevel.PYTHON33, LanguageLevel.PYTHON34); COMPATIBLE_LEVELS = Maps.immutableEnumMap(compatLevels); } private static void addCompatiblePair(Map<LanguageLevel, LanguageLevel> levels, LanguageLevel l1, LanguageLevel l2) { levels.put(l1, l2); levels.put(l2, l1); } private final Map<Module, Sdk> myModuleSdks = ContainerUtil.createWeakMap(); @Override public void initExtra(@NotNull Project project) { final Map<Module, Sdk> moduleSdks = getPythonModuleSdks(project); final Set<Sdk> distinctSdks = StreamEx.ofValues(moduleSdks).nonNull().collect(Collectors.toCollection(LinkedHashSet::new)); myModuleSdks.putAll(moduleSdks); resetProjectLanguageLevel(project); updateSdkLanguageLevels(project, distinctSdks); guessLanguageLevelWithCaching(project); } @Override @NotNull public Key<String> getFileDataKey() { return KEY; } @Override public boolean pushDirectoriesOnly() { return true; } @Override @NotNull public String getDefaultValue() { return 
LanguageLevel.toPythonVersion(LanguageLevel.getDefault()); } @Override @Nullable public String getImmediateValue(@NotNull Project project, @Nullable VirtualFile file) { return null; } @Nullable private static Sdk getFileSdk(@NotNull Project project, @NotNull VirtualFile file) { final Module module = ModuleUtilCore.findModuleForFile(file, project); if (module != null) { final Sdk sdk = PythonSdkUtil.findPythonSdk(module); if (sdk != null) { return sdk; } return null; } else { return findSdkForFileOutsideTheProject(project, file); } } @Nullable private static Sdk findSdkForFileOutsideTheProject(Project project, VirtualFile file) { if (file != null) { final List<OrderEntry> orderEntries = ProjectRootManager.getInstance(project).getFileIndex().getOrderEntriesForFile(file); for (OrderEntry orderEntry : orderEntries) { if (orderEntry instanceof JdkOrderEntry) { return ((JdkOrderEntry)orderEntry).getJdk(); } } } return null; } @Override @NotNull public String getImmediateValue(@NotNull Module module) { if (ApplicationManager.getApplication().isUnitTestMode() && LanguageLevel.FORCE_LANGUAGE_LEVEL != null) { return LanguageLevel.toPythonVersion(LanguageLevel.FORCE_LANGUAGE_LEVEL); } final Sdk sdk = PythonSdkUtil.findPythonSdk(module); return LanguageLevel.toPythonVersion(PythonRuntimeService.getInstance().getLanguageLevelForSdk(sdk)); } @Override public boolean acceptsFile(@NotNull VirtualFile file, @NotNull Project project) { return false; } @Override public boolean acceptsDirectory(@NotNull VirtualFile file, @NotNull Project project) { return true; } private static final FileAttribute PERSISTENCE = new FileAttribute("python_language_level_persistence", 2, true); private static boolean areLanguageLevelsCompatible(LanguageLevel oldLevel, LanguageLevel newLevel) { return oldLevel != null && newLevel != null && COMPATIBLE_LEVELS.get(oldLevel) == newLevel; } @Override public void persistAttribute(@NotNull Project project, @NotNull VirtualFile fileOrDir, @NotNull String value) 
throws IOException { final LanguageLevel level = LanguageLevel.fromPythonVersion(value); final DataInputStream iStream = PERSISTENCE.readAttribute(fileOrDir); LanguageLevel oldLanguageLevel = null; if (iStream != null) { try { final int oldLevelOrdinal = DataInputOutputUtil.readINT(iStream); if (oldLevelOrdinal == level.ordinal()) return; oldLanguageLevel = Arrays.stream(LanguageLevel.values()).filter(it -> it.ordinal() == oldLevelOrdinal).findFirst().orElse(null); } finally { iStream.close(); } } try (DataOutputStream oStream = PERSISTENCE.writeAttribute(fileOrDir)) { DataInputOutputUtil.writeINT(oStream, level.ordinal()); } if (!areLanguageLevelsCompatible(oldLanguageLevel, level) || !ProjectFileIndex.getInstance(project).isInContent(fileOrDir)) { PushedFilePropertiesUpdater.getInstance(project).filePropertiesChanged(fileOrDir, PythonLanguageLevelPusher::isPythonFile); } for (VirtualFile child : fileOrDir.getChildren()) { if (!child.isDirectory() && isPythonFile(child)) { clearSdkPathCache(child); } } } private static boolean isPythonFile(VirtualFile child) { return PythonFileType.INSTANCE.equals(FileTypeRegistry.getInstance().getFileTypeByFileName(child.getNameSequence())); } private static void clearSdkPathCache(@NotNull final VirtualFile child) { final Project[] projects = ProjectManager.getInstance().getOpenProjects(); for (Project project : projects) { final Sdk sdk = getFileSdk(project, child); if (sdk != null) { final PythonSdkPathCache pathCache = PythonSdkPathCache.getInstance(project, sdk); pathCache.clearCache(); } } } @Override public void afterRootsChanged(@NotNull final Project project) { final Map<Module, Sdk> moduleSdks = getPythonModuleSdks(project); final Set<Sdk> distinctSdks = StreamEx.ofValues(moduleSdks).nonNull().collect(Collectors.toCollection(LinkedHashSet::new)); final boolean needToReparseOpenFiles = StreamEx.of(moduleSdks.entrySet()).anyMatch((entry -> { final Module module = entry.getKey(); final Sdk newSdk = entry.getValue(); final 
Sdk oldSdk = myModuleSdks.get(module); return myModuleSdks.containsKey(module) && newSdk != oldSdk; })); myModuleSdks.putAll(moduleSdks); resetProjectLanguageLevel(project); updateSdkLanguageLevels(project, distinctSdks); guessLanguageLevelWithCaching(project); if (needToReparseOpenFiles) { ApplicationManager.getApplication().invokeLater(() -> { if (project.isDisposed()) { return; } PythonCodeStyleService.getInstance().reparseOpenEditorFiles(project); }); } } @NotNull private static Map<Module, Sdk> getPythonModuleSdks(@NotNull Project project) { final Map<Module, Sdk> result = new LinkedHashMap<>(); for (Module module : ModuleManager.getInstance(project).getModules()) { if (isPythonModule(module)) { result.put(module, PythonSdkUtil.findPythonSdk(module)); } } return result; } private static boolean isPythonModule(@NotNull final Module module) { return PyModuleService.getInstance().isPythonModule(module); } private void updateSdkLanguageLevels(@NotNull Project project, @NotNull Set<Sdk> sdks) { if (sdks.isEmpty()) return; final DumbService dumbService = DumbService.getInstance(project); final DumbModeTask task = new DumbModeTask() { @Override public void performInDumbMode(@NotNull ProgressIndicator indicator) { if (project.isDisposed()) return; //final PerformanceWatcher.Snapshot snapshot = PerformanceWatcher.takeSnapshot(); indicator.setIndeterminate(true); indicator.setText(IndexingBundle.message("progress.indexing.scanning")); final List<Runnable> tasks = ReadAction.compute(() -> getRootUpdateTasks(project, sdks)); PushedFilePropertiesUpdater.getInstance(project).runConcurrentlyIfPossible(tasks); //if (!ApplicationManager.getApplication().isUnitTestMode()) { // snapshot.logResponsivenessSinceCreation("Pushing Python language level to " + tasks.size() + " roots in " + sdks.size() + // " SDKs"); //} } }; project.getMessageBus().connect(task).subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootListener() { @Override public void rootsChanged(@NotNull 
ModuleRootEvent event) { DumbService.getInstance(project).cancelTask(task); } }); dumbService.queueTask(task); } private List<Runnable> getRootUpdateTasks(@NotNull Project project, @NotNull Set<Sdk> sdks) { final List<Runnable> results = new ArrayList<>(); for (Sdk sdk : sdks) { final LanguageLevel languageLevel = PythonRuntimeService.getInstance().getLanguageLevelForSdk(sdk); for (VirtualFile root : sdk.getRootProvider().getFiles(OrderRootType.CLASSES)) { if (!root.isValid() || PyTypeShed.INSTANCE.isInside(root)) { continue; } final VirtualFile parent = root.getParent(); final boolean shouldSuppressSizeLimit = parent != null && parent.getName().equals(PythonSdkUtil.SKELETON_DIR_NAME); results.add(new UpdateRootTask(project, root, languageLevel, shouldSuppressSizeLimit)); } } return results; } @NotNull private static LanguageLevel guessLanguageLevelWithCaching(@NotNull Project project) { LanguageLevel languageLevel = LanguageLevel.fromPythonVersion(project.getUserData(KEY)); if (languageLevel == null) { languageLevel = guessLanguageLevel(project); project.putUserData(KEY, LanguageLevel.toPythonVersion(languageLevel)); } return languageLevel; } private static void resetProjectLanguageLevel(@NotNull Project project) { project.putUserData(KEY, null); } @NotNull private static LanguageLevel guessLanguageLevel(@NotNull Project project) { final ModuleManager moduleManager = ModuleManager.getInstance(project); if (moduleManager != null) { LanguageLevel maxLevel = null; for (Module projectModule : moduleManager.getModules()) { final Sdk sdk = PythonSdkUtil.findPythonSdk(projectModule); if (sdk != null) { final LanguageLevel level = PythonRuntimeService.getInstance().getLanguageLevelForSdk(sdk); if (maxLevel == null || maxLevel.isOlderThan(level)) { maxLevel = level; } } } if (maxLevel != null) { return maxLevel; } } return LanguageLevel.getDefault(); } /** * Returns Python language level for a virtual file. 
* * @see LanguageLevel#forElement */ @NotNull public static LanguageLevel getLanguageLevelForVirtualFile(@NotNull Project project, @NotNull VirtualFile virtualFile) { if (virtualFile instanceof VirtualFileWindow) { virtualFile = ((VirtualFileWindow)virtualFile).getDelegate(); } virtualFile = BackedVirtualFile.getOriginFileIfBacked(virtualFile); final LanguageLevel forced = LanguageLevel.FORCE_LANGUAGE_LEVEL; if (ApplicationManager.getApplication().isUnitTestMode() && forced != null) return forced; final LanguageLevel specified = specifiedFileLanguageLevel(virtualFile); if (specified != null) return specified; final Sdk sdk = virtualFile instanceof LightVirtualFile ? null : getFileSdk(project, virtualFile); if (sdk != null) return PythonRuntimeService.getInstance().getLanguageLevelForSdk(sdk); return guessLanguageLevelWithCaching(project); } private final class UpdateRootTask implements Runnable { @NotNull private final Project myProject; @NotNull private final VirtualFile myRoot; @NotNull private final LanguageLevel myLanguageLevel; private final boolean myShouldSuppressSizeLimit; UpdateRootTask(@NotNull Project project, @NotNull VirtualFile root, @NotNull LanguageLevel languageLevel, boolean shouldSuppressSizeLimit) { myProject = project; myRoot = root; myLanguageLevel = languageLevel; myShouldSuppressSizeLimit = shouldSuppressSizeLimit; } @Override public void run() { if (myProject.isDisposed() || !ReadAction.compute(() -> myRoot.isValid())) return; final PushedFilePropertiesUpdater propertiesUpdater = PushedFilePropertiesUpdater.getInstance(myProject); VfsUtilCore.visitChildrenRecursively(myRoot, new VirtualFileVisitor<Void>() { @Override public boolean visitFile(@NotNull VirtualFile file) { return ReadAction.compute(() -> { if (PyModuleService.getInstance().isFileIgnored(file)) { return false; } if (file.isDirectory()) { propertiesUpdater.findAndUpdateValue( file, PythonLanguageLevelPusher.this, LanguageLevel.toPythonVersion(myLanguageLevel) ); } if 
(myShouldSuppressSizeLimit) { SingleRootFileViewProvider.doNotCheckFileSizeLimit(file); } return true; }); } }); } @Override public String toString() { return "UpdateRootTask{" + "myRoot=" + myRoot + ", myLanguageLevel=" + myLanguageLevel + '}'; } } @TestOnly public static void setForcedLanguageLevel(@NotNull Project project, @Nullable LanguageLevel languageLevel) { LanguageLevel.FORCE_LANGUAGE_LEVEL = languageLevel; PushedFilePropertiesUpdater.getInstance(project).pushAll(new PythonLanguageLevelPusher()); } public static void specifyFileLanguageLevel(@NotNull VirtualFile file, @Nullable LanguageLevel languageLevel) { file.putUserData(KEY, LanguageLevel.toPythonVersion(languageLevel)); } @Nullable private static LanguageLevel specifiedFileLanguageLevel(@Nullable VirtualFile file) { if (file == null) return null; final LanguageLevel specified = LanguageLevel.fromPythonVersion(file.getUserData(KEY)); if (file.isDirectory()) { return specified; } else { return specified == null ? specifiedFileLanguageLevel(file.getParent()) : specified; } } public void flushLanguageLevelCache() { myModuleSdks.clear(); } }
python/python-psi-impl/src/com/jetbrains/python/psi/impl/PythonLanguageLevelPusher.java
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.jetbrains.python.psi.impl; import com.google.common.collect.Maps; import com.intellij.ProjectTopics; import com.intellij.injected.editor.VirtualFileWindow; import com.intellij.notebook.editor.BackedVirtualFile; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.fileTypes.FileTypeRegistry; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleManager; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.progress.ProgressIndicator; import com.intellij.openapi.project.DumbModeTask; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.roots.*; import com.intellij.openapi.roots.impl.FilePropertyPusher; import com.intellij.openapi.roots.impl.PushedFilePropertiesUpdater; import com.intellij.openapi.util.Key; import com.intellij.openapi.vfs.VfsUtilCore; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileVisitor; import com.intellij.openapi.vfs.newvfs.FileAttribute; import com.intellij.psi.SingleRootFileViewProvider; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.indexing.IndexingBundle; import com.intellij.util.io.DataInputOutputUtil; import com.intellij.util.messages.MessageBus; import com.jetbrains.python.PythonCodeStyleService; import com.jetbrains.python.PythonFileType; import com.jetbrains.python.PythonRuntimeService; import com.jetbrains.python.codeInsight.typing.PyTypeShed; import com.jetbrains.python.module.PyModuleService; import com.jetbrains.python.psi.LanguageLevel; import com.jetbrains.python.psi.resolve.PythonSdkPathCache; import 
com.jetbrains.python.sdk.PythonSdkUtil; import one.util.streamex.StreamEx; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.annotations.TestOnly; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.util.*; import java.util.stream.Collectors; /** * @author yole */ public class PythonLanguageLevelPusher implements FilePropertyPusher<String> { private static final Key<String> KEY = new Key<>("python.language.level"); /* It so happens that no single language level is compatible with more than one other. So a map suffices for representation*/ private static final Map<LanguageLevel, LanguageLevel> COMPATIBLE_LEVELS; static { Map<LanguageLevel, LanguageLevel> compatLevels = Maps.newEnumMap(LanguageLevel.class); addCompatiblePair(compatLevels, LanguageLevel.PYTHON26, LanguageLevel.PYTHON27); addCompatiblePair(compatLevels, LanguageLevel.PYTHON33, LanguageLevel.PYTHON34); COMPATIBLE_LEVELS = Maps.immutableEnumMap(compatLevels); } private static void addCompatiblePair(Map<LanguageLevel, LanguageLevel> levels, LanguageLevel l1, LanguageLevel l2) { levels.put(l1, l2); levels.put(l2, l1); } private final Map<Module, Sdk> myModuleSdks = ContainerUtil.createWeakMap(); @Override public void initExtra(@NotNull Project project) { final Map<Module, Sdk> moduleSdks = getPythonModuleSdks(project); final Set<Sdk> distinctSdks = StreamEx.ofValues(moduleSdks).nonNull().collect(Collectors.toCollection(LinkedHashSet::new)); myModuleSdks.putAll(moduleSdks); resetProjectLanguageLevel(project); updateSdkLanguageLevels(project, distinctSdks); guessLanguageLevelWithCaching(project); } @Override @NotNull public Key<String> getFileDataKey() { return KEY; } @Override public boolean pushDirectoriesOnly() { return true; } @Override @NotNull public String getDefaultValue() { return LanguageLevel.toPythonVersion(LanguageLevel.getDefault()); } @Override @Nullable public String 
getImmediateValue(@NotNull Project project, @Nullable VirtualFile file) { return null; } @Nullable private static LanguageLevel getFileLanguageLevel(@NotNull Project project, @Nullable VirtualFile file) { if (ApplicationManager.getApplication().isUnitTestMode() && LanguageLevel.FORCE_LANGUAGE_LEVEL != null) { return LanguageLevel.FORCE_LANGUAGE_LEVEL; } if (file == null) return null; final Sdk sdk = getFileSdk(project, file); if (sdk != null) { return PythonRuntimeService.getInstance().getLanguageLevelForSdk(sdk); } return guessLanguageLevelWithCaching(project); } @Nullable private static Sdk getFileSdk(@NotNull Project project, @NotNull VirtualFile file) { final Module module = ModuleUtilCore.findModuleForFile(file, project); if (module != null) { final Sdk sdk = PythonSdkUtil.findPythonSdk(module); if (sdk != null) { return sdk; } return null; } else { return findSdkForFileOutsideTheProject(project, file); } } @Nullable private static Sdk findSdkForFileOutsideTheProject(Project project, VirtualFile file) { if (file != null) { final List<OrderEntry> orderEntries = ProjectRootManager.getInstance(project).getFileIndex().getOrderEntriesForFile(file); for (OrderEntry orderEntry : orderEntries) { if (orderEntry instanceof JdkOrderEntry) { return ((JdkOrderEntry)orderEntry).getJdk(); } } } return null; } @Override @NotNull public String getImmediateValue(@NotNull Module module) { if (ApplicationManager.getApplication().isUnitTestMode() && LanguageLevel.FORCE_LANGUAGE_LEVEL != null) { return LanguageLevel.toPythonVersion(LanguageLevel.FORCE_LANGUAGE_LEVEL); } final Sdk sdk = PythonSdkUtil.findPythonSdk(module); return LanguageLevel.toPythonVersion(PythonRuntimeService.getInstance().getLanguageLevelForSdk(sdk)); } @Override public boolean acceptsFile(@NotNull VirtualFile file, @NotNull Project project) { return false; } @Override public boolean acceptsDirectory(@NotNull VirtualFile file, @NotNull Project project) { return true; } private static final FileAttribute 
PERSISTENCE = new FileAttribute("python_language_level_persistence", 2, true); private static boolean areLanguageLevelsCompatible(LanguageLevel oldLevel, LanguageLevel newLevel) { return oldLevel != null && newLevel != null && COMPATIBLE_LEVELS.get(oldLevel) == newLevel; } @Override public void persistAttribute(@NotNull Project project, @NotNull VirtualFile fileOrDir, @NotNull String value) throws IOException { final LanguageLevel level = LanguageLevel.fromPythonVersion(value); final DataInputStream iStream = PERSISTENCE.readAttribute(fileOrDir); LanguageLevel oldLanguageLevel = null; if (iStream != null) { try { final int oldLevelOrdinal = DataInputOutputUtil.readINT(iStream); if (oldLevelOrdinal == level.ordinal()) return; oldLanguageLevel = Arrays.stream(LanguageLevel.values()).filter(it -> it.ordinal() == oldLevelOrdinal).findFirst().orElse(null); } finally { iStream.close(); } } try (DataOutputStream oStream = PERSISTENCE.writeAttribute(fileOrDir)) { DataInputOutputUtil.writeINT(oStream, level.ordinal()); } if (!areLanguageLevelsCompatible(oldLanguageLevel, level) || !ProjectFileIndex.getInstance(project).isInContent(fileOrDir)) { PushedFilePropertiesUpdater.getInstance(project).filePropertiesChanged(fileOrDir, PythonLanguageLevelPusher::isPythonFile); } for (VirtualFile child : fileOrDir.getChildren()) { if (!child.isDirectory() && isPythonFile(child)) { clearSdkPathCache(child); } } } private static boolean isPythonFile(VirtualFile child) { return PythonFileType.INSTANCE.equals(FileTypeRegistry.getInstance().getFileTypeByFileName(child.getNameSequence())); } private static void clearSdkPathCache(@NotNull final VirtualFile child) { final Project[] projects = ProjectManager.getInstance().getOpenProjects(); for (Project project : projects) { final Sdk sdk = getFileSdk(project, child); if (sdk != null) { final PythonSdkPathCache pathCache = PythonSdkPathCache.getInstance(project, sdk); pathCache.clearCache(); } } } @Override public void 
afterRootsChanged(@NotNull final Project project) { final Map<Module, Sdk> moduleSdks = getPythonModuleSdks(project); final Set<Sdk> distinctSdks = StreamEx.ofValues(moduleSdks).nonNull().collect(Collectors.toCollection(LinkedHashSet::new)); final boolean needToReparseOpenFiles = StreamEx.of(moduleSdks.entrySet()).anyMatch((entry -> { final Module module = entry.getKey(); final Sdk newSdk = entry.getValue(); final Sdk oldSdk = myModuleSdks.get(module); return myModuleSdks.containsKey(module) && newSdk != oldSdk; })); myModuleSdks.putAll(moduleSdks); resetProjectLanguageLevel(project); updateSdkLanguageLevels(project, distinctSdks); guessLanguageLevelWithCaching(project); if (needToReparseOpenFiles) { ApplicationManager.getApplication().invokeLater(() -> { if (project.isDisposed()) { return; } PythonCodeStyleService.getInstance().reparseOpenEditorFiles(project); }); } } @NotNull private static Map<Module, Sdk> getPythonModuleSdks(@NotNull Project project) { final Map<Module, Sdk> result = new LinkedHashMap<>(); for (Module module : ModuleManager.getInstance(project).getModules()) { if (isPythonModule(module)) { result.put(module, PythonSdkUtil.findPythonSdk(module)); } } return result; } private static boolean isPythonModule(@NotNull final Module module) { return PyModuleService.getInstance().isPythonModule(module); } private void updateSdkLanguageLevels(@NotNull Project project, @NotNull Set<Sdk> sdks) { if (sdks.isEmpty()) return; final DumbService dumbService = DumbService.getInstance(project); final DumbModeTask task = new DumbModeTask() { @Override public void performInDumbMode(@NotNull ProgressIndicator indicator) { if (project.isDisposed()) return; //final PerformanceWatcher.Snapshot snapshot = PerformanceWatcher.takeSnapshot(); indicator.setIndeterminate(true); indicator.setText(IndexingBundle.message("progress.indexing.scanning")); final List<Runnable> tasks = ReadAction.compute(() -> getRootUpdateTasks(project, sdks)); 
PushedFilePropertiesUpdater.getInstance(project).runConcurrentlyIfPossible(tasks); //if (!ApplicationManager.getApplication().isUnitTestMode()) { // snapshot.logResponsivenessSinceCreation("Pushing Python language level to " + tasks.size() + " roots in " + sdks.size() + // " SDKs"); //} } }; project.getMessageBus().connect(task).subscribe(ProjectTopics.PROJECT_ROOTS, new ModuleRootListener() { @Override public void rootsChanged(@NotNull ModuleRootEvent event) { DumbService.getInstance(project).cancelTask(task); } }); dumbService.queueTask(task); } private List<Runnable> getRootUpdateTasks(@NotNull Project project, @NotNull Set<Sdk> sdks) { final List<Runnable> results = new ArrayList<>(); for (Sdk sdk : sdks) { final LanguageLevel languageLevel = PythonRuntimeService.getInstance().getLanguageLevelForSdk(sdk); for (VirtualFile root : sdk.getRootProvider().getFiles(OrderRootType.CLASSES)) { if (!root.isValid() || PyTypeShed.INSTANCE.isInside(root)) { continue; } final VirtualFile parent = root.getParent(); final boolean shouldSuppressSizeLimit = parent != null && parent.getName().equals(PythonSdkUtil.SKELETON_DIR_NAME); results.add(new UpdateRootTask(project, root, languageLevel, shouldSuppressSizeLimit)); } } return results; } @NotNull private static LanguageLevel guessLanguageLevelWithCaching(@NotNull Project project) { LanguageLevel languageLevel = LanguageLevel.fromPythonVersion(project.getUserData(KEY)); if (languageLevel == null) { languageLevel = guessLanguageLevel(project); project.putUserData(KEY, LanguageLevel.toPythonVersion(languageLevel)); } return languageLevel; } private static void resetProjectLanguageLevel(@NotNull Project project) { project.putUserData(KEY, null); } @NotNull private static LanguageLevel guessLanguageLevel(@NotNull Project project) { final ModuleManager moduleManager = ModuleManager.getInstance(project); if (moduleManager != null) { LanguageLevel maxLevel = null; for (Module projectModule : moduleManager.getModules()) { final Sdk sdk 
= PythonSdkUtil.findPythonSdk(projectModule); if (sdk != null) { final LanguageLevel level = PythonRuntimeService.getInstance().getLanguageLevelForSdk(sdk); if (maxLevel == null || maxLevel.isOlderThan(level)) { maxLevel = level; } } } if (maxLevel != null) { return maxLevel; } } return LanguageLevel.getDefault(); } /** * Returns Python language level for a virtual file. * * @see LanguageLevel#forElement */ @NotNull public static LanguageLevel getLanguageLevelForVirtualFile(@NotNull Project project, @NotNull VirtualFile virtualFile) { if (virtualFile instanceof VirtualFileWindow) { virtualFile = ((VirtualFileWindow)virtualFile).getDelegate(); } virtualFile = BackedVirtualFile.getOriginFileIfBacked(virtualFile); // Most of the cases should be handled by this one, PyLanguageLevelPusher pushes folders only final VirtualFile folder = virtualFile.getParent(); if (folder != null) { final LanguageLevel folderLevel = specifiedFileLanguageLevel(folder); if (folderLevel != null) { return folderLevel; } final LanguageLevel fileLevel = getFileLanguageLevel(project, virtualFile); if (fileLevel != null) { return fileLevel; } } else { // However this allows us to setup language level per file manually // in case when it is LightVirtualFile final LanguageLevel level = specifiedFileLanguageLevel(virtualFile); if (level != null) return level; if (ApplicationManager.getApplication().isUnitTestMode()) { final LanguageLevel languageLevel = LanguageLevel.FORCE_LANGUAGE_LEVEL; if (languageLevel != null) { return languageLevel; } } } return guessLanguageLevelWithCaching(project); } private final class UpdateRootTask implements Runnable { @NotNull private final Project myProject; @NotNull private final VirtualFile myRoot; @NotNull private final LanguageLevel myLanguageLevel; private final boolean myShouldSuppressSizeLimit; UpdateRootTask(@NotNull Project project, @NotNull VirtualFile root, @NotNull LanguageLevel languageLevel, boolean shouldSuppressSizeLimit) { myProject = project; myRoot 
= root; myLanguageLevel = languageLevel; myShouldSuppressSizeLimit = shouldSuppressSizeLimit; } @Override public void run() { if (myProject.isDisposed() || !ReadAction.compute(() -> myRoot.isValid())) return; final PushedFilePropertiesUpdater propertiesUpdater = PushedFilePropertiesUpdater.getInstance(myProject); VfsUtilCore.visitChildrenRecursively(myRoot, new VirtualFileVisitor<Void>() { @Override public boolean visitFile(@NotNull VirtualFile file) { return ReadAction.compute(() -> { if (PyModuleService.getInstance().isFileIgnored(file)) { return false; } if (file.isDirectory()) { propertiesUpdater.findAndUpdateValue( file, PythonLanguageLevelPusher.this, LanguageLevel.toPythonVersion(myLanguageLevel) ); } if (myShouldSuppressSizeLimit) { SingleRootFileViewProvider.doNotCheckFileSizeLimit(file); } return true; }); } }); } @Override public String toString() { return "UpdateRootTask{" + "myRoot=" + myRoot + ", myLanguageLevel=" + myLanguageLevel + '}'; } } @TestOnly public static void setForcedLanguageLevel(@NotNull Project project, @Nullable LanguageLevel languageLevel) { LanguageLevel.FORCE_LANGUAGE_LEVEL = languageLevel; PushedFilePropertiesUpdater.getInstance(project).pushAll(new PythonLanguageLevelPusher()); } public static void specifyFileLanguageLevel(@NotNull VirtualFile file, @Nullable LanguageLevel languageLevel) { file.putUserData(KEY, LanguageLevel.toPythonVersion(languageLevel)); } @Nullable private static LanguageLevel specifiedFileLanguageLevel(@NotNull VirtualFile file) { return LanguageLevel.fromPythonVersion(file.getUserData(KEY)); } public void flushLanguageLevelCache() { myModuleSdks.clear(); } }
Unify logic of calculating language level for a file 1. Forced language level via test only method 2. Specified language level via separate method for programmatically created file 3. Pre-calculated language level for the containing directory 4. Language level of the file sdk 5. Maximum language level among project modules GitOrigin-RevId: b6a2b382807c4170020fd6ad045ddbd7523add64
python/python-psi-impl/src/com/jetbrains/python/psi/impl/PythonLanguageLevelPusher.java
Unify logic of calculating language level for a file
<ide><path>ython/python-psi-impl/src/com/jetbrains/python/psi/impl/PythonLanguageLevelPusher.java <ide> import com.intellij.openapi.vfs.VirtualFileVisitor; <ide> import com.intellij.openapi.vfs.newvfs.FileAttribute; <ide> import com.intellij.psi.SingleRootFileViewProvider; <add>import com.intellij.testFramework.LightVirtualFile; <ide> import com.intellij.util.containers.ContainerUtil; <ide> import com.intellij.util.indexing.IndexingBundle; <ide> import com.intellij.util.io.DataInputOutputUtil; <ide> } <ide> <ide> @Nullable <del> private static LanguageLevel getFileLanguageLevel(@NotNull Project project, @Nullable VirtualFile file) { <del> if (ApplicationManager.getApplication().isUnitTestMode() && LanguageLevel.FORCE_LANGUAGE_LEVEL != null) { <del> return LanguageLevel.FORCE_LANGUAGE_LEVEL; <del> } <del> if (file == null) return null; <del> final Sdk sdk = getFileSdk(project, file); <del> if (sdk != null) { <del> return PythonRuntimeService.getInstance().getLanguageLevelForSdk(sdk); <del> } <del> return guessLanguageLevelWithCaching(project); <del> } <del> <del> @Nullable <ide> private static Sdk getFileSdk(@NotNull Project project, @NotNull VirtualFile file) { <ide> final Module module = ModuleUtilCore.findModuleForFile(file, project); <ide> if (module != null) { <ide> } <ide> virtualFile = BackedVirtualFile.getOriginFileIfBacked(virtualFile); <ide> <del> // Most of the cases should be handled by this one, PyLanguageLevelPusher pushes folders only <del> final VirtualFile folder = virtualFile.getParent(); <del> if (folder != null) { <del> final LanguageLevel folderLevel = specifiedFileLanguageLevel(folder); <del> if (folderLevel != null) { <del> return folderLevel; <del> } <del> final LanguageLevel fileLevel = getFileLanguageLevel(project, virtualFile); <del> if (fileLevel != null) { <del> return fileLevel; <del> } <del> } <del> else { <del> // However this allows us to setup language level per file manually <del> // in case when it is LightVirtualFile <del> final 
LanguageLevel level = specifiedFileLanguageLevel(virtualFile); <del> if (level != null) return level; <del> <del> if (ApplicationManager.getApplication().isUnitTestMode()) { <del> final LanguageLevel languageLevel = LanguageLevel.FORCE_LANGUAGE_LEVEL; <del> if (languageLevel != null) { <del> return languageLevel; <del> } <del> } <del> } <add> final LanguageLevel forced = LanguageLevel.FORCE_LANGUAGE_LEVEL; <add> if (ApplicationManager.getApplication().isUnitTestMode() && forced != null) return forced; <add> <add> final LanguageLevel specified = specifiedFileLanguageLevel(virtualFile); <add> if (specified != null) return specified; <add> <add> final Sdk sdk = virtualFile instanceof LightVirtualFile ? null : getFileSdk(project, virtualFile); <add> if (sdk != null) return PythonRuntimeService.getInstance().getLanguageLevelForSdk(sdk); <add> <ide> return guessLanguageLevelWithCaching(project); <ide> } <ide> <ide> } <ide> <ide> @Nullable <del> private static LanguageLevel specifiedFileLanguageLevel(@NotNull VirtualFile file) { <del> return LanguageLevel.fromPythonVersion(file.getUserData(KEY)); <add> private static LanguageLevel specifiedFileLanguageLevel(@Nullable VirtualFile file) { <add> if (file == null) return null; <add> <add> final LanguageLevel specified = LanguageLevel.fromPythonVersion(file.getUserData(KEY)); <add> if (file.isDirectory()) { <add> return specified; <add> } <add> else { <add> return specified == null ? specifiedFileLanguageLevel(file.getParent()) : specified; <add> } <ide> } <ide> <ide> public void flushLanguageLevelCache() {
Java
apache-2.0
8c7b2d8cf47a14e201f3b96701436d74bb98a2c8
0
godfreyhe/flink,tony810430/flink,jinglining/flink,aljoscha/flink,tzulitai/flink,clarkyzl/flink,darionyaphet/flink,kl0u/flink,jinglining/flink,tony810430/flink,sunjincheng121/flink,aljoscha/flink,aljoscha/flink,hequn8128/flink,aljoscha/flink,bowenli86/flink,greghogan/flink,tillrohrmann/flink,sunjincheng121/flink,aljoscha/flink,StephanEwen/incubator-flink,twalthr/flink,hequn8128/flink,tzulitai/flink,zjureel/flink,StephanEwen/incubator-flink,gyfora/flink,gyfora/flink,jinglining/flink,apache/flink,kaibozhou/flink,zentol/flink,zjureel/flink,lincoln-lil/flink,GJL/flink,mbode/flink,zentol/flink,zentol/flink,zjureel/flink,rmetzger/flink,rmetzger/flink,xccui/flink,greghogan/flink,kl0u/flink,lincoln-lil/flink,hequn8128/flink,rmetzger/flink,kaibozhou/flink,kaibozhou/flink,godfreyhe/flink,apache/flink,tzulitai/flink,bowenli86/flink,xccui/flink,GJL/flink,wwjiang007/flink,twalthr/flink,apache/flink,sunjincheng121/flink,xccui/flink,wwjiang007/flink,rmetzger/flink,apache/flink,gyfora/flink,mbode/flink,godfreyhe/flink,tzulitai/flink,godfreyhe/flink,tony810430/flink,zentol/flink,xccui/flink,StephanEwen/incubator-flink,tzulitai/flink,aljoscha/flink,zjureel/flink,tzulitai/flink,apache/flink,darionyaphet/flink,jinglining/flink,gyfora/flink,tony810430/flink,gyfora/flink,zjureel/flink,lincoln-lil/flink,StephanEwen/incubator-flink,StephanEwen/incubator-flink,GJL/flink,hequn8128/flink,darionyaphet/flink,greghogan/flink,tillrohrmann/flink,jinglining/flink,godfreyhe/flink,xccui/flink,kaibozhou/flink,darionyaphet/flink,sunjincheng121/flink,wwjiang007/flink,godfreyhe/flink,darionyaphet/flink,tony810430/flink,twalthr/flink,tillrohrmann/flink,GJL/flink,bowenli86/flink,mbode/flink,lincoln-lil/flink,clarkyzl/flink,tillrohrmann/flink,hequn8128/flink,apache/flink,kl0u/flink,zentol/flink,wwjiang007/flink,bowenli86/flink,clarkyzl/flink,xccui/flink,twalthr/flink,kaibozhou/flink,gyfora/flink,wwjiang007/flink,twalthr/flink,bowenli86/flink,twalthr/flink,zjureel/flink,tony810430/flink,bowenli86/flink,rmetzg
er/flink,jinglining/flink,twalthr/flink,wwjiang007/flink,greghogan/flink,clarkyzl/flink,lincoln-lil/flink,mbode/flink,StephanEwen/incubator-flink,tony810430/flink,tillrohrmann/flink,GJL/flink,hequn8128/flink,gyfora/flink,GJL/flink,lincoln-lil/flink,greghogan/flink,mbode/flink,rmetzger/flink,greghogan/flink,xccui/flink,sunjincheng121/flink,tillrohrmann/flink,zjureel/flink,kaibozhou/flink,rmetzger/flink,tillrohrmann/flink,sunjincheng121/flink,kl0u/flink,clarkyzl/flink,godfreyhe/flink,wwjiang007/flink,apache/flink,kl0u/flink,lincoln-lil/flink,zentol/flink,zentol/flink,kl0u/flink
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.io.disk; import java.io.EOFException; import java.io.IOException; import java.util.List; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.core.memory.MemorySegmentSource; import org.apache.flink.runtime.memory.AbstractPagedOutputView; import org.apache.flink.util.MathUtils; /** * The list with the full segments contains at any point all completely full segments, plus the segment that is * currently filled. 
*/ public class SimpleCollectingOutputView extends AbstractPagedOutputView { private final List<MemorySegment> fullSegments; private final MemorySegmentSource memorySource; private final int segmentSizeBits; private int segmentNum; public SimpleCollectingOutputView(List<MemorySegment> fullSegmentTarget, MemorySegmentSource memSource, int segmentSize) { super(memSource.nextSegment(), segmentSize, 0); this.segmentSizeBits = MathUtils.log2strict(segmentSize); this.fullSegments = fullSegmentTarget; this.memorySource = memSource; this.fullSegments.add(getCurrentSegment()); } public void reset() { if (this.fullSegments.size() != 0) { throw new IllegalStateException("The target list still contains memory segments."); } clear(); try { advance(); } catch (IOException ioex) { throw new RuntimeException("Error getting first segment for record collector.", ioex); } this.segmentNum = 0; } @Override protected MemorySegment nextSegment(MemorySegment current, int positionInCurrent) throws EOFException { final MemorySegment next = this.memorySource.nextSegment(); if (next != null) { this.fullSegments.add(next); this.segmentNum++; return next; } else { throw new EOFException("Can't collect further: memorySource depleted"); } } public long getCurrentOffset() { return (((long) this.segmentNum) << this.segmentSizeBits) + getCurrentPositionInSegment(); } }
flink-runtime/src/main/java/org/apache/flink/runtime/io/disk/SimpleCollectingOutputView.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.io.disk; import java.io.EOFException; import java.io.IOException; import java.util.List; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.core.memory.MemorySegmentSource; import org.apache.flink.runtime.memory.AbstractPagedOutputView; import org.apache.flink.util.MathUtils; /** * The list with the full segments contains at any point all completely full segments, plus the segment that is * currently filled. 
*/ public class SimpleCollectingOutputView extends AbstractPagedOutputView { private final List<MemorySegment> fullSegments; private final MemorySegmentSource memorySource; private final int segmentSizeBits; private int segmentNum; public SimpleCollectingOutputView(List<MemorySegment> fullSegmentTarget, MemorySegmentSource memSource, int segmentSize) { super(memSource.nextSegment(), segmentSize, 0); this.segmentSizeBits = MathUtils.log2strict(segmentSize); this.fullSegments = fullSegmentTarget; this.memorySource = memSource; this.fullSegments.add(getCurrentSegment()); } public void reset() { if (this.fullSegments.size() != 0) { throw new IllegalStateException("The target list still contains memory segments."); } clear(); try { advance(); } catch (IOException ioex) { throw new RuntimeException("Error getting first segment for record collector.", ioex); } this.segmentNum = 0; } @Override protected MemorySegment nextSegment(MemorySegment current, int positionInCurrent) throws EOFException { final MemorySegment next = this.memorySource.nextSegment(); if (next != null) { this.fullSegments.add(next); this.segmentNum++; return next; } else { throw new EOFException(); } } public long getCurrentOffset() { return (((long) this.segmentNum) << this.segmentSizeBits) + getCurrentPositionInSegment(); } }
[FLINK-13097] Make the cause for EOFException explicit (buffer depletion)
flink-runtime/src/main/java/org/apache/flink/runtime/io/disk/SimpleCollectingOutputView.java
[FLINK-13097] Make the cause for EOFException explicit (buffer depletion)
<ide><path>link-runtime/src/main/java/org/apache/flink/runtime/io/disk/SimpleCollectingOutputView.java <ide> this.segmentNum++; <ide> return next; <ide> } else { <del> throw new EOFException(); <add> throw new EOFException("Can't collect further: memorySource depleted"); <ide> } <ide> } <ide>
Java
mit
af04963d764f468b760957de560eff9b6bd21063
0
Gnewt/bainbridgefirst
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package org.usfirst.frc4915.ArcadeDriveRobot; public class Version { private static final String VERSION = "v4.10.4"; // Should be ready for practice // Implements safety changes // --Safety enabled for both Harvester a:nd launcher motors // --Removed delay on drive straight command // --Added debug info for the WindingMotor's Safety // Adds default behaviors for Winding and Harvester motors. // Testing the rangefinder // Should display the distance in inches from wall // MoveUpToDistance moves until it is 3 feet from the wall public static String getVersion() { return VERSION; } }
ArcadeDriveRobot/src/org/usfirst/frc4915/ArcadeDriveRobot/Version.java
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package org.usfirst.frc4915.ArcadeDriveRobot; public class Version { private static final String VERSION = "v4.10.3"; // Should be ready for practice // Implements safety changes // --Safety enabled for both Harvester a:nd launcher motors // --Removed delay on drive straight command // --Added debug info for the WindingMotor's Safety // Adds default behaviors for Winding and Harvester motors. // Testing the rangefinder // Should display the distance in inches from wall // MoveUpToDistance moves until it is 3 feet from the wall public static String getVersion() { return VERSION; } }
Version number change
ArcadeDriveRobot/src/org/usfirst/frc4915/ArcadeDriveRobot/Version.java
Version number change
<ide><path>rcadeDriveRobot/src/org/usfirst/frc4915/ArcadeDriveRobot/Version.java <ide> <ide> public class Version { <ide> <del> private static final String VERSION = "v4.10.3"; <add> private static final String VERSION = "v4.10.4"; <ide> // Should be ready for practice <ide> // Implements safety changes <ide> // --Safety enabled for both Harvester a:nd launcher motors
JavaScript
mit
a4ff4d964f82aa29f0e5e69988ea6b4507d9d6c6
0
GameDistribution/GD-HTML5,GameDistribution/GD-HTML5
'use strict'; import PackageJSON from '../package.json'; import VideoAd from './components/VideoAd'; import EventBus from './components/EventBus'; import ImplementationTest from './components/ImplementationTest'; import {dankLog} from './modules/dankLog'; import { extendDefaults, getParentUrl, getParentDomain, getCookie, } from './modules/common'; let instance = null; /** * SDK */ class SDK { /** * Constructor of SDK. * @param {Object} options * @return {*} */ constructor(options) { // Make this a singleton. if (instance) { return instance; } else { instance = this; } // Set some defaults. We replace them with real given // values further down. const defaults = { debug: false, gameId: '4f3d7d38d24b740c95da2b03dc3a2333', userId: '31D29405-8D37-4270-BF7C-8D99CCF0177F-s1', advertisementSettings: {}, resumeGame: function() { // ... }, pauseGame: function() { // ... }, onEvent: function(event) { // ... }, onInit: function(data) { // ... }, onError: function(data) { // ... }, }; if (options) { this.options = extendDefaults(defaults, options); } else { this.options = defaults; } // Open the debug console when debugging is enabled. try { if (this.options.debug || localStorage.getItem('gd_debug')) { this.openConsole(); } } catch (error) { console.log(error); } // Set a version banner within the developer console. const date = new Date(); const versionInformation = { version: PackageJSON.version, date: date.getDate() + '-' + (date.getMonth() + 1) + '-' + date.getFullYear(), time: date.getHours() + ':' + date.getMinutes(), }; const banner = console.log( '%c %c %c Gamedistribution.com HTML5 SDK | Version: ' + versionInformation.version + ' (' + versionInformation.date + ' ' + versionInformation.time + ') %c %c %c', 'background: #9854d8', 'background: #6c2ca7', 'color: #fff; background: #450f78;', 'background: #6c2ca7', 'background: #9854d8', 'background: #ffffff'); /* eslint-disable */ console.log.apply(console, banner); /* eslint-enable */ // Get referrer domain data. 
const referrer = getParentUrl(); const parentDomain = getParentDomain(); // Call Google Analytics. this._googleAnalytics(); // Call Death Star. this._deathStar(); // Setup all event listeners. // We also send a Google Analytics event for each one of our events. this.eventBus = new EventBus(); this.eventBus.gameId = this.options.gameId; // SDK events this.eventBus.subscribe('SDK_READY', (arg) => this._onEvent(arg)); this.eventBus.subscribe('SDK_ERROR', (arg) => this._onEvent(arg)); this.eventBus.subscribe('SDK_GAME_DATA_READY', (arg) => this._onEvent(arg)); this.eventBus.subscribe('SDK_GAME_START', (arg) => this._onEvent(arg)); this.eventBus.subscribe('SDK_GAME_PAUSE', (arg) => this._onEvent(arg)); // IMA HTML5 SDK events this.eventBus.subscribe('AD_SDK_LOADER_READY', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_SDK_MANAGER_READY', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_SDK_REQUEST_ADS', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_SDK_ERROR', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_SDK_FINISHED', (arg) => this._onEvent(arg)); // Ad events this.eventBus.subscribe('AD_CANCELED', (arg) => { this._onEvent(arg); this.onResumeGame( 'Advertisement error, no worries, start / resume the game.', 'warning'); }); this.eventBus.subscribe('AD_ERROR', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_SAFETY_TIMER', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_BREAK_READY', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_METADATA', (arg) => this._onEvent(arg)); this.eventBus.subscribe('ALL_ADS_COMPLETED', (arg) => { this._onEvent(arg); this.onResumeGame( 'Advertisement(s) are done. Start / resume the game.', 'success'); // Do a request to flag the sdk as available within the catalog. // This flagging allows our developer to do a request to publish // this game, otherwise this option would remain unavailable. const protocol = ('https:' === document.location.protocol) ? 
'https:' : 'http:'; if (referrer === protocol + '//gamedistribution.com/controlpanel/game/edit/' + this.options.gameId) { (new Image()).src = 'https://game.api.gamedistribution.com/game/updateapi/' + this.options.gameId; } }); this.eventBus.subscribe('CLICK', (arg) => this._onEvent(arg)); this.eventBus.subscribe('COMPLETE', (arg) => this._onEvent(arg)); this.eventBus.subscribe('CONTENT_PAUSE_REQUESTED', (arg) => { this._onEvent(arg); this.onPauseGame('New advertisements requested and loaded', 'success'); }); this.eventBus.subscribe('CONTENT_RESUME_REQUESTED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('DURATION_CHANGE', (arg) => this._onEvent(arg)); this.eventBus.subscribe('FIRST_QUARTILE', (arg) => this._onEvent(arg)); this.eventBus.subscribe('IMPRESSION', (arg) => this._onEvent(arg)); this.eventBus.subscribe('INTERACTION', (arg) => this._onEvent(arg)); this.eventBus.subscribe('LINEAR_CHANGED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('LOADED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('LOG', (arg) => this._onEvent(arg)); this.eventBus.subscribe('MIDPOINT', (arg) => this._onEvent(arg)); this.eventBus.subscribe('PAUSED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('RESUMED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('SKIPPABLE_STATE_CHANGED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('SKIPPED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('STARTED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('THIRD_QUARTILE', (arg) => this._onEvent(arg)); this.eventBus.subscribe('USER_CLOSE', (arg) => this._onEvent(arg)); this.eventBus.subscribe('VOLUME_CHANGED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('VOLUME_MUTED', (arg) => this._onEvent(arg)); // Only allow ads after the preroll and after a certain amount of time. // This time restriction is available from gameData. this.adRequestTimer = undefined; // Game API. 
// If it fails we use default data, so this should always resolve. let gameData = { gameId: '49258a0e497c42b5b5d87887f24d27a6', // Jewel Burst. affiliate: 'A-GAMEDIST', advertisements: true, preroll: true, midroll: 2 * 60000, title: '', tags: '', category: '', }; const gameDataPromise = new Promise((resolve) => { const gameDataUrl = 'https://game.api.gamedistribution.com/' + 'game/get/' + this.options.gameId + '?domain=' + parentDomain; const gameDataRequest = new Request(gameDataUrl, {method: 'GET'}); fetch(gameDataRequest). then((response) => { const contentType = response.headers.get('content-type'); if (contentType && contentType.includes('application/json')) { return response.json(); } else { throw new TypeError('Oops, we didn\'t get JSON!'); } }). then(json => { if (!json.success && json.error) { dankLog('SDK_GAME_DATA_READY', json.error, 'warning'); } try { const retrievedGameData = { gameId: json.result.game.gameMd5, affiliate: json.result.affiliate.affiliateId, advertisements: json.result.game.enableAds, preroll: json.result.game.preRoll, midroll: json.result.game.timeAds * 60000, title: json.result.game.title, category: json.result.game.category, tags: json.result.game.tags, }; gameData = extendDefaults(gameData, retrievedGameData); dankLog('SDK_GAME_DATA_READY', gameData, 'success'); // Try to send some additional analytics to Death Star. try { let tagsArray = []; gameData.tags.forEach((tag) => { tagsArray.push(tag.title.toLowerCase()); }); ga('gd.set', 'dimension2', gameData.title.toLowerCase()); ga('gd.set', 'dimension3', tagsArray.join(', ')); } catch (error) { console.log(error); } } catch (error) { dankLog('SDK_GAME_DATA_READY', error, 'warning'); } resolve(gameData); }). catch((error) => { dankLog('SDK_GAME_DATA_READY', error, 'success'); resolve(gameData); }); }); // Tunnl. // Get the affiliate id from Tunnl. // If it fails we continue the game, so this should always resolve. 
const adTagIdPromise = new Promise((resolve) => { const adTagIdUrl = 'https://ana.tunnl.com/at?id=' + this.options.gameId + '&pageurl=' + parentDomain + '&type=1'; const adTagIdRequest = new Request(adTagIdUrl, {method: 'GET'}); let adTagId = 'T-17112073197'; fetch(adTagIdRequest).then(response => { const contentType = response.headers.get('content-type'); if (contentType && contentType.includes('application/json')) { return response.json(); } else { throw new TypeError('Oops, we didn\'t get JSON!'); } }).then(json => { if (json.AdTagId) { adTagId = json.AdTagId; dankLog('SDK_TAG_ID_READY', adTagId, 'success'); resolve(adTagId); } else { dankLog('SDK_TAG_ID_READY', adTagId, 'warning'); } resolve(adTagId); }).catch((error) => { dankLog('SDK_TAG_ID_READY', error, 'warning'); resolve(adTagId); }); }); // Create the ad tag. // This promise can trigger the videoAdPromise. Promise.all([ gameDataPromise, adTagIdPromise, ]).then((response) => { // Start our advertisement instance. Setting up the // adsLoader should resolve VideoAdPromise. this.videoAdInstance = new VideoAd( this.options.advertisementSettings); this.videoAdInstance.gameId = this.options.gameId; // Record a game "play"-event in Tunnl. dankLog('SDK_RECORD_GAME_PLAY', '', 'success'); (new Image()).src = 'https://ana.tunnl.com/distevent?tid=' + response[1] + '&game_id=' + this.options.gameId + '&disttype=1&eventtype=1'; // Create the actual ad tag. this.videoAdInstance.tag = 'https://pub.tunnl.com/' + 'opp?tid=' + response[1] + '&player_width=640' + '&player_height=480' + '&page_url=' + encodeURIComponent(referrer) + '&game_id=' + this.options.gameId; // Enable some debugging perks. try { if (localStorage.getItem('gd_debug')) { // So we can set a custom tag. if (localStorage.getItem('gd_tag')) { this.videoAdInstance.tag = localStorage.getItem('gd_tag'); } // So we can call mid rolls quickly. 
if (localStorage.getItem('gd_midroll')) { response[0].midroll = localStorage.getItem('gd_midroll'); } } } catch (error) { console.log(error); } // Check if the preroll and auto play is enabled. If so, then we // start the adRequestTimer, blocking any attempts // to call any subsequent advertisement too soon, as the preroll // will be called automatically from our video advertisement // instance, instead of calling the showBanner method. if (response[0].preroll && this.videoAdInstance.options.autoplay) { this.adRequestTimer = new Date(); } this.videoAdInstance.start(); }); // Ad ready or failed. // Setup our video ad promise, which should be resolved before an ad // can be called from a click event. const videoAdPromise = new Promise((resolve, reject) => { // The ad is preloaded and ready. this.eventBus.subscribe('AD_SDK_MANAGER_READY', (arg) => resolve()); // The IMA SDK failed. this.eventBus.subscribe('AD_SDK_ERROR', (arg) => reject()); // It can happen that the first ad request failed... unlucky. this.eventBus.subscribe('AD_CANCELED', (arg) => reject()); }); // Now check if everything is ready. // We use default SDK data if the promise fails. this.readyPromise = Promise.all([ gameDataPromise, videoAdPromise, ]).then((response) => { let eventName = 'SDK_READY'; let eventMessage = 'Everything is ready.'; this.eventBus.broadcast(eventName, { name: eventName, message: eventMessage, status: 'success', analytics: { category: 'SDK', action: eventName, label: this.options.gameId, }, }); return response[0]; }).catch(() => { let eventName = 'SDK_ERROR'; let eventMessage = 'The SDK failed.'; this.eventBus.broadcast(eventName, { name: eventName, message: eventMessage, status: 'error', analytics: { category: 'SDK', action: eventName, label: this.options.gameId, }, }); return false; }); } /** * _onEvent * Gives us a nice console log message for all our events going * through the EventBus. * @param {Object} event * @private */ _onEvent(event) { // Show the event in the log. 
dankLog(event.name, event.message, event.status); // Push out a Google event for each event. Makes our // life easier. I think. try { /* eslint-disable */ if (typeof ga !== 'undefined') { ga('gd.send', { hitType: 'event', eventCategory: (event.analytics.category) ? event.analytics.category : '', eventAction: (event.analytics.action) ? event.analytics.action : '', eventLabel: (event.analytics.label) ? event.analytics.label : '', }); } /* eslint-enable */ } catch (error) { console.log(error); } // Now send the event to the developer. this.options.onEvent(event); } /** * _googleAnalytics * @private */ _googleAnalytics() { /* eslint-disable */ // Load Google Analytics so we can push out a Google event for // each of our events. if (typeof ga === 'undefined') { (function(i, s, o, g, r, a, m) { i['GoogleAnalyticsObject'] = r; i[r] = i[r] || function() { (i[r].q = i[r].q || []).push(arguments); }, i[r].l = 1 * new Date(); a = s.createElement(o), m = s.getElementsByTagName(o)[0]; a.async = 1; a.src = g; m.parentNode.insertBefore(a, m); })(window, document, 'script', 'https://www.google-analytics.com/analytics.js', 'ga'); } ga('create', 'UA-102601800-1', {'name': 'gd'}, 'auto'); // Inject Death Star id's to the page view. const lcl = getCookie('brzcrz_local'); if (lcl) { ga('gd.set', 'userId', lcl); ga('gd.set', 'dimension1', lcl); } ga('gd.send', 'pageview'); } /** * _deathStar * @private */ _deathStar() { // Project Death Star. 
// https://bitbucket.org/keygamesnetwork/datacollectionservice const script = document.createElement('script'); script.innerHTML = ` var DS_OPTIONS = { id: 'GAMEDISTRIBUTION', success: function(id) { ga('gd.set', 'userId', id); ga('gd.set', 'dimension1', id); } } `; document.head.appendChild(script); // Load Death Star (function(window, document, element, source) { const ds = document.createElement(element); const m = document.getElementsByTagName(element)[0]; ds.type = 'text/javascript'; ds.async = true; ds.src = source; m.parentNode.insertBefore(ds, m); })(window, document, 'script', 'https://game.gamemonkey.org/static/main.min.js'); /* eslint-enable */ } /** * showBanner * Used by our developer to call a video advertisement. * @public */ showBanner() { this.readyPromise.then((gameData) => { if (gameData.advertisements) { // Check if ad is not called too often. if (typeof this.adRequestTimer !== 'undefined') { const elapsed = (new Date()).valueOf() - this.adRequestTimer.valueOf(); if (elapsed < gameData.midroll) { dankLog('SDK_SHOW_BANNER', 'The advertisement was requested too soon after ' + 'the previous advertisement was finished.', 'warning'); // Resume game for legacy purposes. this.onResumeGame( 'Just resume the game...', 'success'); } else { dankLog('SDK_SHOW_BANNER', 'Requested the midroll advertisement.', 'success'); this.videoAdInstance.play(); this.adRequestTimer = new Date(); } } else { dankLog('SDK_SHOW_BANNER', 'Requested the preroll advertisement.', 'success'); this.videoAdInstance.play(); this.adRequestTimer = new Date(); } } else { this.videoAdInstance.cancel(); dankLog('SDK_SHOW_BANNER', 'Advertisements are disabled.', 'warning'); } }).catch((error) => { dankLog('SDK_SHOW_BANNER', error, 'error'); }); } /** * customLog [deprecated] * GD Logger sends how many times 'CustomLog' that is called * related to given by _key name. If you invoke 'CustomLog' many times, * it increases 'CustomLog' counter and sends this counter value. 
* @param {String} key * @public */ customLog(key) { // ... } /** * play [deprecated] * GD Logger sends how many times 'PlayGame' is called. If you * invoke 'PlayGame' many times, it increases 'PlayGame' counter and * sends this counter value. * @public */ play() { // ... } /** * onResumeGame * Called from various moments within the SDK. This sends * out a callback to our developer, so he/ she can allow the game to * resume again. We also call resumeGame() for backwards * compatibility reasons. * @param {String} message * @param {String} status */ onResumeGame(message, status) { this.options.resumeGame(); let eventName = 'SDK_GAME_START'; this.eventBus.broadcast(eventName, { name: eventName, message: message, status: status, analytics: { category: 'SDK', action: eventName, label: this.options.gameId, }, }); } /** * onPauseGame * Called from various moments within the SDK. This sends * out a callback to pause the game. It is required to have the game * paused when an advertisement starts playing. * @param {String} message * @param {String} status */ onPauseGame(message, status) { this.options.pauseGame(); let eventName = 'SDK_GAME_PAUSE'; this.eventBus.broadcast(eventName, { name: eventName, message: message, status: status, analytics: { category: 'SDK', action: eventName, label: this.options.gameId, }, }); } /** * openConsole * Enable debugging, we also set a value in localStorage, * so we can also enable debugging without setting the property. * This is nice for when we're trying to debug a game that is not ours. * @public */ openConsole() { try { const implementation = new ImplementationTest(); implementation.start(); localStorage.setItem('gd_debug', true); } catch (error) { console.log(error); } } } export default SDK;
src/main.js
'use strict'; import PackageJSON from '../package.json'; import VideoAd from './components/VideoAd'; import EventBus from './components/EventBus'; import ImplementationTest from './components/ImplementationTest'; import {dankLog} from './modules/dankLog'; import { extendDefaults, getParentUrl, getParentDomain, getCookie, } from './modules/common'; let instance = null; /** * SDK */ class SDK { /** * Constructor of SDK. * @param {Object} options * @return {*} */ constructor(options) { // Make this a singleton. if (instance) { return instance; } else { instance = this; } // Set some defaults. We replace them with real given // values further down. const defaults = { debug: false, gameId: '4f3d7d38d24b740c95da2b03dc3a2333', userId: '31D29405-8D37-4270-BF7C-8D99CCF0177F-s1', advertisementSettings: {}, resumeGame: function() { // ... }, pauseGame: function() { // ... }, onEvent: function(event) { // ... }, onInit: function(data) { // ... }, onError: function(data) { // ... }, }; if (options) { this.options = extendDefaults(defaults, options); } else { this.options = defaults; } // Open the debug console when debugging is enabled. try { if (this.options.debug || localStorage.getItem('gd_debug')) { this.openConsole(); } } catch (error) { console.log(error); } // Set a version banner within the developer console. const date = new Date(); const versionInformation = { version: PackageJSON.version, date: date.getDate() + '-' + (date.getMonth() + 1) + '-' + date.getFullYear(), time: date.getHours() + ':' + date.getMinutes(), }; const banner = console.log( '%c %c %c Gamedistribution.com HTML5 SDK | Version: ' + versionInformation.version + ' (' + versionInformation.date + ' ' + versionInformation.time + ') %c %c %c', 'background: #9854d8', 'background: #6c2ca7', 'color: #fff; background: #450f78;', 'background: #6c2ca7', 'background: #9854d8', 'background: #ffffff'); /* eslint-disable */ console.log.apply(console, banner); /* eslint-enable */ // Get referrer domain data. 
const referrer = getParentUrl(); const parentDomain = getParentDomain(); // Call Google Analytics. this._googleAnalytics(); // Call Death Star. this._deathStar(); // Setup all event listeners. // We also send a Google Analytics event for each one of our events. this.eventBus = new EventBus(); this.eventBus.gameId = this.options.gameId; // SDK events this.eventBus.subscribe('SDK_READY', (arg) => this._onEvent(arg)); this.eventBus.subscribe('SDK_ERROR', (arg) => this._onEvent(arg)); this.eventBus.subscribe('SDK_GAME_DATA_READY', (arg) => this._onEvent(arg)); this.eventBus.subscribe('SDK_GAME_START', (arg) => this._onEvent(arg)); this.eventBus.subscribe('SDK_GAME_PAUSE', (arg) => this._onEvent(arg)); // IMA HTML5 SDK events this.eventBus.subscribe('AD_SDK_LOADER_READY', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_SDK_MANAGER_READY', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_SDK_REQUEST_ADS', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_SDK_ERROR', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_SDK_FINISHED', (arg) => this._onEvent(arg)); // Ad events this.eventBus.subscribe('AD_CANCELED', (arg) => { this._onEvent(arg); this.onResumeGame( 'Advertisement error, no worries, start / resume the game.', 'warning'); }); this.eventBus.subscribe('AD_ERROR', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_SAFETY_TIMER', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_BREAK_READY', (arg) => this._onEvent(arg)); this.eventBus.subscribe('AD_METADATA', (arg) => this._onEvent(arg)); this.eventBus.subscribe('ALL_ADS_COMPLETED', (arg) => { this._onEvent(arg); this.onResumeGame( 'Advertisement(s) are done. Start / resume the game.', 'success'); // Do a request to flag the sdk as available within the catalog. // This flagging allows our developer to do a request to publish // this game, otherwise this option would remain unavailable. const protocol = ('https:' === document.location.protocol) ? 
'https:' : 'http:'; if (referrer === protocol + '//gamedistribution.com/controlpanel/game/edit/' + this.options.gameId) { const updateCatalogUrl = 'https://game.api.gamedistribution.com/game/updateapi/' + this.options.gameId; const gameDataRequest = new Request(updateCatalogUrl, {method: 'GET'}); fetch(gameDataRequest).then((response) => { if (response.status === 200) { dankLog('SDK_FLAG', 'active', 'success'); } else { dankLog('SDK_FLAG', response.status, 'warning'); } }).catch((error) => { dankLog('SDK_FLAG', error, 'error'); }); } }); this.eventBus.subscribe('CLICK', (arg) => this._onEvent(arg)); this.eventBus.subscribe('COMPLETE', (arg) => this._onEvent(arg)); this.eventBus.subscribe('CONTENT_PAUSE_REQUESTED', (arg) => { this._onEvent(arg); this.onPauseGame('New advertisements requested and loaded', 'success'); }); this.eventBus.subscribe('CONTENT_RESUME_REQUESTED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('DURATION_CHANGE', (arg) => this._onEvent(arg)); this.eventBus.subscribe('FIRST_QUARTILE', (arg) => this._onEvent(arg)); this.eventBus.subscribe('IMPRESSION', (arg) => this._onEvent(arg)); this.eventBus.subscribe('INTERACTION', (arg) => this._onEvent(arg)); this.eventBus.subscribe('LINEAR_CHANGED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('LOADED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('LOG', (arg) => this._onEvent(arg)); this.eventBus.subscribe('MIDPOINT', (arg) => this._onEvent(arg)); this.eventBus.subscribe('PAUSED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('RESUMED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('SKIPPABLE_STATE_CHANGED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('SKIPPED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('STARTED', (arg) => this._onEvent(arg)); this.eventBus.subscribe('THIRD_QUARTILE', (arg) => this._onEvent(arg)); this.eventBus.subscribe('USER_CLOSE', (arg) => this._onEvent(arg)); this.eventBus.subscribe('VOLUME_CHANGED', (arg) => 
this._onEvent(arg)); this.eventBus.subscribe('VOLUME_MUTED', (arg) => this._onEvent(arg)); // Only allow ads after the preroll and after a certain amount of time. // This time restriction is available from gameData. this.adRequestTimer = undefined; // Game API. // If it fails we use default data, so this should always resolve. let gameData = { gameId: '49258a0e497c42b5b5d87887f24d27a6', // Jewel Burst. affiliate: 'A-GAMEDIST', advertisements: true, preroll: true, midroll: 2 * 60000, title: '', tags: '', category: '', }; const gameDataPromise = new Promise((resolve) => { const gameDataUrl = 'https://game.api.gamedistribution.com/' + 'game/get/' + this.options.gameId + '?domain=' + parentDomain; const gameDataRequest = new Request(gameDataUrl, {method: 'GET'}); fetch(gameDataRequest). then((response) => { const contentType = response.headers.get('content-type'); if (contentType && contentType.includes('application/json')) { return response.json(); } else { throw new TypeError('Oops, we didn\'t get JSON!'); } }). then(json => { if (!json.success && json.error) { dankLog('SDK_GAME_DATA_READY', json.error, 'warning'); } try { const retrievedGameData = { gameId: json.result.game.gameMd5, affiliate: json.result.affiliate.affiliateId, advertisements: json.result.game.enableAds, preroll: json.result.game.preRoll, midroll: json.result.game.timeAds * 60000, title: json.result.game.title, category: json.result.game.category, tags: json.result.game.tags, }; gameData = extendDefaults(gameData, retrievedGameData); dankLog('SDK_GAME_DATA_READY', gameData, 'success'); // Try to send some additional analytics to Death Star. try { let tagsArray = []; gameData.tags.forEach((tag) => { tagsArray.push(tag.title.toLowerCase()); }); ga('gd.set', 'dimension2', gameData.title.toLowerCase()); ga('gd.set', 'dimension3', tagsArray.join(', ')); } catch (error) { console.log(error); } } catch (error) { dankLog('SDK_GAME_DATA_READY', error, 'warning'); } resolve(gameData); }). 
catch((error) => { dankLog('SDK_GAME_DATA_READY', error, 'success'); resolve(gameData); }); }); // Tunnl. // Get the affiliate id from Tunnl. // If it fails we continue the game, so this should always resolve. const adTagIdPromise = new Promise((resolve) => { const adTagIdUrl = 'https://ana.tunnl.com/at?id=' + this.options.gameId + '&pageurl=' + parentDomain + '&type=1'; const adTagIdRequest = new Request(adTagIdUrl, {method: 'GET'}); let adTagId = 'T-17112073197'; fetch(adTagIdRequest).then(response => { const contentType = response.headers.get('content-type'); if (contentType && contentType.includes('application/json')) { return response.json(); } else { throw new TypeError('Oops, we didn\'t get JSON!'); } }).then(json => { if (json.AdTagId) { adTagId = json.AdTagId; dankLog('SDK_TAG_ID_READY', adTagId, 'success'); resolve(adTagId); } else { dankLog('SDK_TAG_ID_READY', adTagId, 'warning'); } resolve(adTagId); }).catch((error) => { dankLog('SDK_TAG_ID_READY', error, 'warning'); resolve(adTagId); }); }); // Create the ad tag. // This promise can trigger the videoAdPromise. Promise.all([ gameDataPromise, adTagIdPromise, ]).then((response) => { // Start our advertisement instance. Setting up the // adsLoader should resolve VideoAdPromise. this.videoAdInstance = new VideoAd( this.options.advertisementSettings); this.videoAdInstance.gameId = this.options.gameId; // Record a game "play"-event in Tunnl. dankLog('SDK_RECORD_GAME_PLAY', '', 'success'); (new Image()).src = 'https://ana.tunnl.com/distevent?tid=' + response[1] + '&game_id=' + this.options.gameId + '&disttype=1&eventtype=1'; // Create the actual ad tag. this.videoAdInstance.tag = 'https://pub.tunnl.com/' + 'opp?tid=' + response[1] + '&player_width=640' + '&player_height=480' + '&page_url=' + encodeURIComponent(referrer) + '&game_id=' + this.options.gameId; // Enable some debugging perks. try { if (localStorage.getItem('gd_debug')) { // So we can set a custom tag. 
if (localStorage.getItem('gd_tag')) { this.videoAdInstance.tag = localStorage.getItem('gd_tag'); } // So we can call mid rolls quickly. if (localStorage.getItem('gd_midroll')) { response[0].midroll = localStorage.getItem('gd_midroll'); } } } catch (error) { console.log(error); } // Check if the preroll and auto play is enabled. If so, then we // start the adRequestTimer, blocking any attempts // to call any subsequent advertisement too soon, as the preroll // will be called automatically from our video advertisement // instance, instead of calling the showBanner method. if (response[0].preroll && this.videoAdInstance.options.autoplay) { this.adRequestTimer = new Date(); } this.videoAdInstance.start(); }); // Ad ready or failed. // Setup our video ad promise, which should be resolved before an ad // can be called from a click event. const videoAdPromise = new Promise((resolve, reject) => { // The ad is preloaded and ready. this.eventBus.subscribe('AD_SDK_MANAGER_READY', (arg) => resolve()); // The IMA SDK failed. this.eventBus.subscribe('AD_SDK_ERROR', (arg) => reject()); // It can happen that the first ad request failed... unlucky. this.eventBus.subscribe('AD_CANCELED', (arg) => reject()); }); // Now check if everything is ready. // We use default SDK data if the promise fails. 
this.readyPromise = Promise.all([ gameDataPromise, videoAdPromise, ]).then((response) => { let eventName = 'SDK_READY'; let eventMessage = 'Everything is ready.'; this.eventBus.broadcast(eventName, { name: eventName, message: eventMessage, status: 'success', analytics: { category: 'SDK', action: eventName, label: this.options.gameId, }, }); return response[0]; }).catch(() => { let eventName = 'SDK_ERROR'; let eventMessage = 'The SDK failed.'; this.eventBus.broadcast(eventName, { name: eventName, message: eventMessage, status: 'error', analytics: { category: 'SDK', action: eventName, label: this.options.gameId, }, }); return false; }); } /** * _onEvent * Gives us a nice console log message for all our events going * through the EventBus. * @param {Object} event * @private */ _onEvent(event) { // Show the event in the log. dankLog(event.name, event.message, event.status); // Push out a Google event for each event. Makes our // life easier. I think. try { /* eslint-disable */ if (typeof ga !== 'undefined') { ga('gd.send', { hitType: 'event', eventCategory: (event.analytics.category) ? event.analytics.category : '', eventAction: (event.analytics.action) ? event.analytics.action : '', eventLabel: (event.analytics.label) ? event.analytics.label : '', }); } /* eslint-enable */ } catch (error) { console.log(error); } // Now send the event to the developer. this.options.onEvent(event); } /** * _googleAnalytics * @private */ _googleAnalytics() { /* eslint-disable */ // Load Google Analytics so we can push out a Google event for // each of our events. 
if (typeof ga === 'undefined') { (function(i, s, o, g, r, a, m) { i['GoogleAnalyticsObject'] = r; i[r] = i[r] || function() { (i[r].q = i[r].q || []).push(arguments); }, i[r].l = 1 * new Date(); a = s.createElement(o), m = s.getElementsByTagName(o)[0]; a.async = 1; a.src = g; m.parentNode.insertBefore(a, m); })(window, document, 'script', 'https://www.google-analytics.com/analytics.js', 'ga'); } ga('create', 'UA-102601800-1', {'name': 'gd'}, 'auto'); // Inject Death Star id's to the page view. const lcl = getCookie('brzcrz_local'); if (lcl) { ga('gd.set', 'userId', lcl); ga('gd.set', 'dimension1', lcl); } ga('gd.send', 'pageview'); } /** * _deathStar * @private */ _deathStar() { // Project Death Star. // https://bitbucket.org/keygamesnetwork/datacollectionservice const script = document.createElement('script'); script.innerHTML = ` var DS_OPTIONS = { id: 'GAMEDISTRIBUTION', success: function(id) { ga('gd.set', 'userId', id); ga('gd.set', 'dimension1', id); } } `; document.head.appendChild(script); // Load Death Star (function(window, document, element, source) { const ds = document.createElement(element); const m = document.getElementsByTagName(element)[0]; ds.type = 'text/javascript'; ds.async = true; ds.src = source; m.parentNode.insertBefore(ds, m); })(window, document, 'script', 'https://game.gamemonkey.org/static/main.min.js'); /* eslint-enable */ } /** * showBanner * Used by our developer to call a video advertisement. * @public */ showBanner() { this.readyPromise.then((gameData) => { if (gameData.advertisements) { // Check if ad is not called too often. if (typeof this.adRequestTimer !== 'undefined') { const elapsed = (new Date()).valueOf() - this.adRequestTimer.valueOf(); if (elapsed < gameData.midroll) { dankLog('SDK_SHOW_BANNER', 'The advertisement was requested too soon after ' + 'the previous advertisement was finished.', 'warning'); // Resume game for legacy purposes. 
this.onResumeGame( 'Just resume the game...', 'success'); } else { dankLog('SDK_SHOW_BANNER', 'Requested the midroll advertisement.', 'success'); this.videoAdInstance.play(); this.adRequestTimer = new Date(); } } else { dankLog('SDK_SHOW_BANNER', 'Requested the preroll advertisement.', 'success'); this.videoAdInstance.play(); this.adRequestTimer = new Date(); } } else { this.videoAdInstance.cancel(); dankLog('SDK_SHOW_BANNER', 'Advertisements are disabled.', 'warning'); } }).catch((error) => { dankLog('SDK_SHOW_BANNER', error, 'error'); }); } /** * customLog [deprecated] * GD Logger sends how many times 'CustomLog' that is called * related to given by _key name. If you invoke 'CustomLog' many times, * it increases 'CustomLog' counter and sends this counter value. * @param {String} key * @public */ customLog(key) { // ... } /** * play [deprecated] * GD Logger sends how many times 'PlayGame' is called. If you * invoke 'PlayGame' many times, it increases 'PlayGame' counter and * sends this counter value. * @public */ play() { // ... } /** * onResumeGame * Called from various moments within the SDK. This sends * out a callback to our developer, so he/ she can allow the game to * resume again. We also call resumeGame() for backwards * compatibility reasons. * @param {String} message * @param {String} status */ onResumeGame(message, status) { this.options.resumeGame(); let eventName = 'SDK_GAME_START'; this.eventBus.broadcast(eventName, { name: eventName, message: message, status: status, analytics: { category: 'SDK', action: eventName, label: this.options.gameId, }, }); } /** * onPauseGame * Called from various moments within the SDK. This sends * out a callback to pause the game. It is required to have the game * paused when an advertisement starts playing. 
* @param {String} message * @param {String} status */ onPauseGame(message, status) { this.options.pauseGame(); let eventName = 'SDK_GAME_PAUSE'; this.eventBus.broadcast(eventName, { name: eventName, message: message, status: status, analytics: { category: 'SDK', action: eventName, label: this.options.gameId, }, }); } /** * openConsole * Enable debugging, we also set a value in localStorage, * so we can also enable debugging without setting the property. * This is nice for when we're trying to debug a game that is not ours. * @public */ openConsole() { try { const implementation = new ImplementationTest(); implementation.start(); localStorage.setItem('gd_debug', true); } catch (error) { console.log(error); } } } export default SDK;
VGD-615 - Now just simply trigger the get request using an image.
src/main.js
VGD-615 - Now just simply trigger the get request using an image.
<ide><path>rc/main.js <ide> if (referrer === <ide> protocol + '//gamedistribution.com/controlpanel/game/edit/' + <ide> this.options.gameId) { <del> const updateCatalogUrl = 'https://game.api.gamedistribution.com/game/updateapi/' + <add> (new Image()).src = 'https://game.api.gamedistribution.com/game/updateapi/' + <ide> this.options.gameId; <del> const gameDataRequest = new Request(updateCatalogUrl, <del> {method: 'GET'}); <del> fetch(gameDataRequest).then((response) => { <del> if (response.status === 200) { <del> dankLog('SDK_FLAG', 'active', 'success'); <del> } else { <del> dankLog('SDK_FLAG', response.status, 'warning'); <del> } <del> }).catch((error) => { <del> dankLog('SDK_FLAG', error, 'error'); <del> }); <ide> } <ide> }); <ide> this.eventBus.subscribe('CLICK', (arg) => this._onEvent(arg));
Java
apache-2.0
a0146fccb83ba2839ba6fe527c8397332408e6c8
0
Terradue/one,abelCoronado93/one,Terradue/one,baby-gnu/one,juanmont/one,spirit03/one,OpenNebula/one,fasrc/one,baby-gnu/one,dberzano/opennebula-torino,cloudweavers/one,unistra/one,fasrc/one,cloudweavers/one,baby-gnu/one,atodorov-storpool/one,hsanjuan/one,ohamada/one,mattthias/one,mattthias/one,unistra/one,goberle/one,ggalancs/one,abelCoronado93/one,goberle/one,atodorov-storpool/one,fasrc/one,goberle/one,spirit03/one,juanmont/one,ohamada/one,alvarosimon/one,Terradue/one,larsks/opennebula-lks,unistra/one,hsanjuan/one,larsks/opennebula-lks,cloudweavers/one,tuxmea/one,juanmont/one,atodorov-storpool/one,ohamada/one,tuxmea/one,cloudweavers/one,cloudweavers/one,unistra/one,mattthias/one,abelCoronado93/one,goberle/one,ohamada/one,OpenNebula/one,larsks/opennebula-lks,dberzano/opennebula-torino,hsanjuan/one,juanmont/one,cloudweavers/one,dberzano/opennebula-torino,ohamada/one,juanmont/one,baby-gnu/one,ggalancs/one,abelCoronado93/one,fasrc/one,OpenNebula/one,alvarosimon/one,atodorov-storpool/one,atodorov-storpool/one,hsanjuan/one,goberle/one,OpenNebula/one,alvarosimon/one,baby-gnu/one,spirit03/one,ggalancs/one,atodorov-storpool/one,larsks/opennebula-lks,abelCoronado93/one,larsks/opennebula-lks,abelCoronado93/one,atodorov-storpool/one,unistra/one,hsanjuan/one,fasrc/one,abelCoronado93/one,mattthias/one,cloudweavers/one,tuxmea/one,alvarosimon/one,abelCoronado93/one,fasrc/one,mattthias/one,spirit03/one,ohamada/one,atodorov-storpool/one,tuxmea/one,juanmont/one,goberle/one,Terradue/one,dberzano/opennebula-torino,dberzano/opennebula-torino,OpenNebula/one,dberzano/opennebula-torino,larsks/opennebula-lks,OpenNebula/one,baby-gnu/one,ggalancs/one,spirit03/one,baby-gnu/one,hsanjuan/one,cloudweavers/one,larsks/opennebula-lks,unistra/one,Terradue/one,spirit03/one,goberle/one,hsanjuan/one,juanmont/one,ggalancs/one,ohamada/one,baby-gnu/one,hsanjuan/one,ggalancs/one,mattthias/one,juanmont/one,tuxmea/one,alvarosimon/one,alvarosimon/one,alvarosimon/one,juanmont/one,Terradue/one,ggalancs/one,fasrc/o
ne,OpenNebula/one,fasrc/one,unistra/one,spirit03/one,mattthias/one,alvarosimon/one,OpenNebula/one,dberzano/opennebula-torino,ggalancs/one,Terradue/one,tuxmea/one,spirit03/one,OpenNebula/one,Terradue/one,tuxmea/one,atodorov-storpool/one,ohamada/one,unistra/one,goberle/one,tuxmea/one,mattthias/one
/******************************************************************************* * Copyright 2002-2010, OpenNebula Project Leads (OpenNebula.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package org.opennebula.client.vm; import org.opennebula.client.Client; import org.opennebula.client.OneResponse; import org.opennebula.client.PoolElement; import org.w3c.dom.Node; /** * This class represents an OpenNebula VM. * It also offers static XML-RPC call wrappers. 
*/ public class VirtualMachine extends PoolElement{ private static final String METHOD_PREFIX = "vm."; private static final String ALLOCATE = METHOD_PREFIX + "allocate"; private static final String INFO = METHOD_PREFIX + "info"; private static final String DEPLOY = METHOD_PREFIX + "deploy"; private static final String ACTION = METHOD_PREFIX + "action"; private static final String MIGRATE = METHOD_PREFIX + "migrate"; private static final String SAVEDISK = METHOD_PREFIX + "savedisk"; private static final String[] VM_STATES = { "INIT", "PENDING", "HOLD", "ACTIVE", "STOPPED", "SUSPENDED", "DONE", "FAILED" }; private static final String[] SHORT_VM_STATES = { "init", "pend", "hold", "actv", "stop", "susp", "done", "fail" }; private static final String[] LCM_STATE = { "LCM_INIT", "PROLOG", "BOOT", "RUNNING", "MIGRATE", "SAVE_STOP", "SAVE_SUSPEND", "SAVE_MIGRATE", "PROLOG_MIGRATE", "PROLOG_RESUME", "EPILOG_STOP", "EPILOG", "SHUTDOWN", "CANCEL", "FAILURE", "DELETE", "UNKNOWN" }; private static final String[] SHORT_LCM_STATES = { null, "prol", "boot", "runn", "migr", "save", "save", "save", "migr", "prol", "epil", "epil", "shut", "shut", "fail", "dele", "unkn" }; /** * Creates a new VM representation. * * @param id The virtual machine Id (vid). * @param client XML-RPC Client. */ public VirtualMachine(int id, Client client) { super(id, client); } /** * @see PoolElement */ protected VirtualMachine(Node xmlElement, Client client) { super(xmlElement, client); } // ================================= // Static XML-RPC methods // ================================= /** * Allocates a new VM in OpenNebula. * * @param client XML-RPC Client. * @param description A string containing the template of the vm. * @return If successful the message contains the associated * id generated for this VM. */ public static OneResponse allocate(Client client, String description) { return client.call(ALLOCATE, description); } /** * Retrieves the information of the given VM. 
* * @param client XML-RPC Client. * @param id The virtual machine id (vid) of the target instance. * @return If successful the message contains the string * with the information returned by OpenNebula. */ public static OneResponse info(Client client, int id) { return client.call(INFO, id); } // ================================= // Instanced object XML-RPC methods // ================================= /** * Loads the xml representation of the virtual machine. * The info is also stored internally. * * @see VirtualMachine#info(Client, int) */ public OneResponse info() { OneResponse response = info(client, id); super.processInfo(response); return response; } /** * Initiates the instance of the VM on the target host. * * @param hostId The host id (hid) of the target host where * the VM will be instantiated. * @return If an error occurs the error message contains the reason. */ public OneResponse deploy(int hostId) { return client.call(DEPLOY, id, hostId); } /** * Submits an action to be performed on the virtual machine. * <br/> * It is recommended to use the helper methods instead: * <ul> * <li>{@link VirtualMachine#shutdown()}</li> * <li>{@link VirtualMachine#cancel()}</li> * <li>{@link VirtualMachine#hold()}</li> * <li>{@link VirtualMachine#release()}</li> * <li>{@link VirtualMachine#stop()}</li> * <li>{@link VirtualMachine#suspend()}</li> * <li>{@link VirtualMachine#resume()}</li> * <li>{@link VirtualMachine#finalizeVM()}</li> * <li>{@link VirtualMachine#restart()}</li> * </ul> * * @param action The action name to be performed, can be:<br/> * "shutdown", "hold", "release", "stop", "cancel", "suspend", * "resume", "restart", "finalize". * @return If an error occurs the error message contains the reason. */ protected OneResponse action(String action) { return client.call(ACTION, action, id); } /** * Migrates the virtual machine to the target host (hid). * * @param hostId The target host id (hid) where we want to migrate * the vm. 
* @param live If true we are indicating that we want livemigration, * otherwise false. * @return If an error occurs the error message contains the reason. */ public OneResponse migrate(int hostId, boolean live) { return client.call(MIGRATE, id, hostId, live); } /** * Sets the specified vm's disk to be saved in a new image when the * VirtualMachine shutdowns. * * @param diskId ID of the disk to be saved. * @param imageId ID of the image where the disk will be saved. * @return If an error occurs the error message contains the reason. */ public OneResponse savedisk(int diskId, int imageId) { return client.call(SAVEDISK, id ,diskId, imageId); } // ================================= // Helpers // ================================= /** * Shuts down the already deployed VM. * @return If an error occurs the error message contains the reason. */ public OneResponse shutdown() { return action("shutdown"); } /** * Cancels the running VM. * @return If an error occurs the error message contains the reason. */ public OneResponse cancel() { return action("cancel"); } /** * Sets the VM to hold state. The VM will not be scheduled until it is * released. * @return If an error occurs the error message contains the reason. */ public OneResponse hold() { return action("hold"); } /** * Releases a virtual machine from hold state. * @return If an error occurs the error message contains the reason. */ public OneResponse release() { return action("release"); } /** * Stops the virtual machine. The virtual machine state is transferred back * to OpenNebula for a possible reschedule. * @return If an error occurs the error message contains the reason. */ public OneResponse stop() { return action("stop"); } /** * Suspends the virtual machine. The virtual machine state is left in the * cluster node for resuming. * @return If an error occurs the error message contains the reason. */ public OneResponse suspend() { return action("suspend"); } /** * Resumes the execution of a saved VM. 
* @return If an error occurs the error message contains the reason. */ public OneResponse resume() { return action("resume"); } /** * Deletes the VM from the pool and database. * @return If an error occurs the error message contains the reason. */ public OneResponse finalizeVM() { return action("finalize"); } /** * Resubmits the virtual machine after failure. * @return If an error occurs the error message contains the reason. */ public OneResponse restart() { return action("shutdown"); } /** * Migrates the virtual machine to the target host (hid). * <br/> * It does the same as {@link VirtualMachine#migrate(int, boolean)} * with live set to false. * * @param hostId The target host id (hid) where we want to migrate * the vm. * @return If an error occurs the error message contains the reason. */ public OneResponse migrate(int hostId) { return migrate(hostId, false); } /** * Performs a live migration of the virtual machine to the * target host (hid). * <br/> * It does the same as {@link VirtualMachine#migrate(int, boolean)} * with live set to true. * * @param hostId The target host id (hid) where we want to migrate * the vm. * @return If an error occurs the error message contains the reason. */ public OneResponse liveMigrate(int hostId) { return migrate(hostId, true); } public int state() { return super.state(); } /** * Returns the VM state of the VirtualMachine (string value). * @return The VM state of the VirtualMachine (string value). */ public String stateStr() { int state = state(); return state != -1 ? VM_STATES[state()] : null; } /** * Returns the LCM state of the VirtualMachine (numeric value). * @return The LCM state of the VirtualMachine (numeric value). */ public int lcmState() { String state = xpath("LCM_STATE"); return state != null ? Integer.parseInt(state) : -1; } /** * Returns the LCM state of the VirtualMachine (string value). * @return The LCM state of the VirtualMachine (string value). 
*/ public String lcmStateStr() { int state = lcmState(); return state != -1 ? LCM_STATE[state] : null; } /** * Returns the short status string for the VirtualMachine. * @return The short status string for the VirtualMachine. */ public String status() { int state = state(); String shortStateStr = null; if(state != -1) { shortStateStr = SHORT_VM_STATES[state]; if(shortStateStr.equals("actv")) { int lcmState = lcmState(); if(lcmState != -1) shortStateStr = SHORT_LCM_STATES[lcmState]; } } return shortStateStr; } }
src/oca/java/src/org/opennebula/client/vm/VirtualMachine.java
/******************************************************************************* * Copyright 2002-2010, OpenNebula Project Leads (OpenNebula.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package org.opennebula.client.vm; import org.opennebula.client.Client; import org.opennebula.client.OneResponse; import org.opennebula.client.PoolElement; import org.w3c.dom.Node; /** * This class represents an OpenNebula VM. * It also offers static XML-RPC call wrappers. 
*/ public class VirtualMachine extends PoolElement{ private static final String METHOD_PREFIX = "vm."; private static final String ALLOCATE = METHOD_PREFIX + "allocate"; private static final String INFO = METHOD_PREFIX + "info"; private static final String DEPLOY = METHOD_PREFIX + "deploy"; private static final String ACTION = METHOD_PREFIX + "action"; private static final String MIGRATE = METHOD_PREFIX + "migrate"; private static final String SAVEDISK = METHOD_PREFIX + "savedisk"; private static final String[] VM_STATES = { "INIT", "PENDING", "HOLD", "ACTIVE", "STOPPED", "SUSPENDED", "DONE", "FAILED" }; private static final String[] SHORT_VM_STATES = { "init", "pend", "hold", "actv", "stop", "susp", "done", "fail" }; private static final String[] LCM_STATE = { "LCM_INIT", "PROLOG", "BOOT", "RUNNING", "MIGRATE", "SAVE_STOP", "SAVE_SUSPEND", "SAVE_MIGRATE", "PROLOG_MIGRATE", "PROLOG_RESUME", "EPILOG_STOP", "EPILOG", "SHUTDOWN", "CANCEL", "FAILURE", "DELETE", "UNKNOWN" }; private static final String[] SHORT_LCM_STATES = { null, "prol", "boot", "runn", "migr", "save", "save", "save", "migr", "prol", "epil", "epil", "shut", "shut", "fail", "dele", "unkn" }; /** * Creates a new VM representation. * * @param id The virtual machine Id (vid). * @param client XML-RPC Client. */ public VirtualMachine(int id, Client client) { super(id, client); } /** * @see PoolElement */ protected VirtualMachine(Node xmlElement, Client client) { super(xmlElement, client); } // ================================= // Static XML-RPC methods // ================================= /** * Allocates a new VM in OpenNebula. * * @param client XML-RPC Client. * @param description A string containing the template of the vm. * @return If successful the message contains the associated * id generated for this VM. */ public static OneResponse allocate(Client client, String description) { return client.call(ALLOCATE, description); } /** * Retrieves the information of the given VM. 
* * @param client XML-RPC Client. * @param id The virtual machine id (vid) of the target instance. * @return If successful the message contains the string * with the information returned by OpenNebula. */ public static OneResponse info(Client client, int id) { return client.call(INFO, id); } // ================================= // Instanced object XML-RPC methods // ================================= /** * Loads the xml representation of the virtual machine. * The info is also stored internally. * * @see VirtualMachine#info(Client, int) */ public OneResponse info() { OneResponse response = info(client, id); super.processInfo(response); return response; } /** * Initiates the instance of the VM on the target host. * * @param hostId The host id (hid) of the target host where * the VM will be instantiated. * @return If an error occurs the error message contains the reason. */ public OneResponse deploy(int hostId) { return client.call(DEPLOY, id, hostId); } /** * Submits an action to be performed on the virtual machine. * <br/> * It is recommended to use the helper methods instead: * <ul> * <li>{@link VirtualMachine#shutdown()}</li> * <li>{@link VirtualMachine#cancel()}</li> * <li>{@link VirtualMachine#hold()}</li> * <li>{@link VirtualMachine#release()}</li> * <li>{@link VirtualMachine#stop()}</li> * <li>{@link VirtualMachine#suspend()}</li> * <li>{@link VirtualMachine#resume()}</li> * <li>{@link VirtualMachine#finalizeVM()}</li> * <li>{@link VirtualMachine#restart()}</li> * </ul> * * @param action The action name to be performed, can be:<br/> * "shutdown", "hold", "release", "stop", "cancel", "suspend", * "resume", "restart", "finalize". * @return If an error occurs the error message contains the reason. */ protected OneResponse action(String action) { return client.call(ACTION, action, id); } /** * Migrates the virtual machine to the target host (hid). * * @param hostId The target host id (hid) where we want to migrate * the vm. 
* @param live If true we are indicating that we want livemigration, * otherwise false. * @return If an error occurs the error message contains the reason. */ public OneResponse migrate(int hostId, boolean live) { return client.call(MIGRATE, id, hostId, live); } /** * Sets the specified vm's disk to be saved in a new image when the * VirtualMachine shutdowns. * * @param diskId ID of the disk to be saved. * @param imageId ID of the image where the disk will be saved. * @return If an error occurs the error message contains the reason. */ public OneResponse savedisk(int diskId, int imageId) { return client.call(SAVEDISK, diskId, imageId); } // ================================= // Helpers // ================================= /** * Shuts down the already deployed VM. * @return If an error occurs the error message contains the reason. */ public OneResponse shutdown() { return action("shutdown"); } /** * Cancels the running VM. * @return If an error occurs the error message contains the reason. */ public OneResponse cancel() { return action("cancel"); } /** * Sets the VM to hold state. The VM will not be scheduled until it is * released. * @return If an error occurs the error message contains the reason. */ public OneResponse hold() { return action("hold"); } /** * Releases a virtual machine from hold state. * @return If an error occurs the error message contains the reason. */ public OneResponse release() { return action("release"); } /** * Stops the virtual machine. The virtual machine state is transferred back * to OpenNebula for a possible reschedule. * @return If an error occurs the error message contains the reason. */ public OneResponse stop() { return action("stop"); } /** * Suspends the virtual machine. The virtual machine state is left in the * cluster node for resuming. * @return If an error occurs the error message contains the reason. */ public OneResponse suspend() { return action("suspend"); } /** * Resumes the execution of a saved VM. 
* @return If an error occurs the error message contains the reason. */ public OneResponse resume() { return action("resume"); } /** * Deletes the VM from the pool and database. * @return If an error occurs the error message contains the reason. */ public OneResponse finalizeVM() { return action("finalize"); } /** * Resubmits the virtual machine after failure. * @return If an error occurs the error message contains the reason. */ public OneResponse restart() { return action("shutdown"); } /** * Migrates the virtual machine to the target host (hid). * <br/> * It does the same as {@link VirtualMachine#migrate(int, boolean)} * with live set to false. * * @param hostId The target host id (hid) where we want to migrate * the vm. * @return If an error occurs the error message contains the reason. */ public OneResponse migrate(int hostId) { return migrate(hostId, false); } /** * Performs a live migration of the virtual machine to the * target host (hid). * <br/> * It does the same as {@link VirtualMachine#migrate(int, boolean)} * with live set to true. * * @param hostId The target host id (hid) where we want to migrate * the vm. * @return If an error occurs the error message contains the reason. */ public OneResponse liveMigrate(int hostId) { return migrate(hostId, true); } public int state() { return super.state(); } /** * Returns the VM state of the VirtualMachine (string value). * @return The VM state of the VirtualMachine (string value). */ public String stateStr() { int state = state(); return state != -1 ? VM_STATES[state()] : null; } /** * Returns the LCM state of the VirtualMachine (numeric value). * @return The LCM state of the VirtualMachine (numeric value). */ public int lcmState() { String state = xpath("LCM_STATE"); return state != null ? Integer.parseInt(state) : -1; } /** * Returns the LCM state of the VirtualMachine (string value). * @return The LCM state of the VirtualMachine (string value). 
*/ public String lcmStateStr() { int state = lcmState(); return state != -1 ? LCM_STATE[state] : null; } /** * Returns the short status string for the VirtualMachine. * @return The short status string for the VirtualMachine. */ public String status() { int state = state(); String shortStateStr = null; if(state != -1) { shortStateStr = SHORT_VM_STATES[state]; if(shortStateStr.equals("actv")) { int lcmState = lcmState(); if(lcmState != -1) shortStateStr = SHORT_LCM_STATES[lcmState]; } } return shortStateStr; } }
Bug #416: Java OCA VirtualMachine.savedisk fixed thanks to Takeo Ohno.
src/oca/java/src/org/opennebula/client/vm/VirtualMachine.java
Bug #416: Java OCA VirtualMachine.savedisk fixed thanks to Takeo Ohno.
<ide><path>rc/oca/java/src/org/opennebula/client/vm/VirtualMachine.java <ide> */ <ide> public OneResponse savedisk(int diskId, int imageId) <ide> { <del> return client.call(SAVEDISK, diskId, imageId); <add> return client.call(SAVEDISK, id ,diskId, imageId); <ide> } <ide> <ide> // =================================
Java
lgpl-2.1
d9c0063b047255925eb8e2d33f6824aa5db91847
0
kurtwalker/pdi-agile-bi-plugin,pedrofvteixeira/pdi-agile-bi-plugin,rmansoor/pdi-agile-bi-plugin,bmorrise/pdi-agile-bi-plugin,pedrofvteixeira/pdi-agile-bi-plugin,pentaho-nbaker/pdi-agile-bi-plugin,bmorrise/pdi-agile-bi-plugin,mkambol/pdi-agile-bi-plugin,kurtwalker/pdi-agile-bi-plugin,pentaho-nbaker/pdi-agile-bi-plugin,mdamour1976/pdi-agile-bi-plugin,rmansoor/pdi-agile-bi-plugin,mdamour1976/pdi-agile-bi-plugin,mkambol/pdi-agile-bi-plugin,rmansoor/pdi-agile-bi-plugin
/* * This program is free software; you can redistribute it and/or modify it under the * terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software * Foundation. * * You should have received a copy of the GNU Lesser General Public License along with this * program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html * or from the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * Copyright (c) 2009 Pentaho Corporation.. All rights reserved. */ package org.pentaho.agilebi.pdi.wizard.ui.xul.steps; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import javax.swing.table.TableModel; import org.pentaho.agilebi.pdi.modeler.ModelerException; import org.pentaho.agilebi.pdi.modeler.ModelerWorkspace; import org.pentaho.agilebi.pdi.modeler.ModelerWorkspaceUtil; import org.pentaho.agilebi.pdi.wizard.EmbeddedWizard; import org.pentaho.commons.metadata.mqleditor.MqlQuery; import org.pentaho.commons.metadata.mqleditor.editor.MQLEditorService; import org.pentaho.commons.metadata.mqleditor.editor.SwtMqlEditor; import org.pentaho.commons.metadata.mqleditor.editor.service.MQLEditorServiceImpl; import org.pentaho.commons.metadata.mqleditor.editor.service.util.MQLEditorServiceDelegate; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.metadata.query.model.util.QueryXmlHelper; import org.pentaho.metadata.repository.IMetadataDomainRepository; import org.pentaho.reporting.engine.classic.core.AbstractReportDefinition; import org.pentaho.reporting.engine.classic.core.CompoundDataFactory; import 
org.pentaho.reporting.engine.classic.core.MetaAttributeNames; import org.pentaho.reporting.engine.classic.core.MetaTableModel; import org.pentaho.reporting.engine.classic.core.ReportDataFactoryException; import org.pentaho.reporting.engine.classic.core.states.datarow.StaticDataRow; import org.pentaho.reporting.engine.classic.core.wizard.DataAttributes; import org.pentaho.reporting.engine.classic.core.wizard.DataSchema; import org.pentaho.reporting.engine.classic.core.wizard.DataSchemaModel; import org.pentaho.reporting.engine.classic.core.wizard.DefaultDataAttributeContext; import org.pentaho.reporting.engine.classic.extensions.datasources.pmd.IPmdConnectionProvider; import org.pentaho.reporting.engine.classic.extensions.datasources.pmd.PmdConnectionProvider; import org.pentaho.reporting.engine.classic.extensions.datasources.pmd.PmdDataFactory; import org.pentaho.reporting.engine.classic.wizard.ui.xul.WizardEditorModel; import org.pentaho.reporting.engine.classic.wizard.ui.xul.components.AbstractWizardStep; import org.pentaho.reporting.libraries.base.util.DebugLog; import org.pentaho.reporting.libraries.base.util.StringUtils; import org.pentaho.reporting.ui.datasources.pmd.PmdPreviewWorker; import org.pentaho.ui.xul.XulDomContainer; import org.pentaho.ui.xul.XulException; import org.pentaho.ui.xul.XulLoader; import org.pentaho.ui.xul.XulServiceCallback; import org.pentaho.ui.xul.components.XulButton; import org.pentaho.ui.xul.components.XulLabel; import org.pentaho.ui.xul.impl.AbstractXulEventHandler; import org.pentaho.ui.xul.impl.DefaultXulOverlay; import org.pentaho.ui.xul.swt.SwtXulLoader; /** * TODO: Document Me * * @author William Seyler */ public class DataSourceAndQueryStep extends AbstractWizardStep { private static final String DATASOURCE_AND_QUERY_STEP_OVERLAY = "org/pentaho/agilebi/pdi/wizard/ui/xul/res/datasource_and_query_step_Overlay.xul"; //$NON-NLS-1$ private static final String HANDLER_NAME = "datasource_and_query_step_handler"; //$NON-NLS-1$ 
private static final String CURRENT_QUERY_PROPERTY_NAME = "currentQuery"; //$NON-NLS-1$ private static final String DATA_SOURCE_NAME_LABEL_ID = "data_source_name_label"; //$NON-NLS-1$ private static final String AVAILABLE_COLUMNS_PROPERTY_NAME = "availableColumns"; //$NON-NLS-1$ private static final String ELEMENTS_PROPERTY_NAME = "elements"; //$NON-NLS-1$ private static final String QUERY_RESULT_LIST_ID = "query_result_list"; //$NON-NLS-1$ private static final String NEXT_BTN_ID = "next_btn"; //$NON-NLS-1$ private static final String DEFAULT = "default"; //$NON-NLS-1$ private PmdDataFactory df; private ModelerWorkspace model; private File modelFile; private List<String> availableColumns; /** * @author wseyler * DatasourceAndQueryStepHandler * A concrete implementation of AbstractXulEventHandler that defines a name for * itself and contains methods that correspond to onClick and onCommand markups * in the corresponding *.xul file. */ protected class DatasourceAndQueryStepHandler extends AbstractXulEventHandler { public DatasourceAndQueryStepHandler() { } public String getName() { return HANDLER_NAME; } private IMetadataDomainRepository getDomainRepo() throws ReportDataFactoryException { IPmdConnectionProvider connectionProvider = ((PmdDataFactory) getEditorModel().getReportDefinition().getDataFactory()).getConnectionProvider(); IMetadataDomainRepository repo = connectionProvider.getMetadataDomainRepository(DEFAULT, getEditorModel().getReportDefinition().getResourceManager(), getEditorModel().getReportDefinition().getContentBase(), df.getXmiFile()); return repo; } private MQLEditorServiceDelegate getMqlServiceDelegate() throws ReportDataFactoryException{ MQLEditorServiceDelegate delegate = new MQLEditorServiceDelegate(getDomainRepo()) { @Override public String[][] getPreviewData(MqlQuery query, int page, int limit) { org.pentaho.metadata.query.model.Query mqlQuery = convertQueryModel(query); String mqlString = new QueryXmlHelper().toXML(mqlQuery); PmdDataFactory df 
= (PmdDataFactory) getEditorModel().getReportDefinition().getDataFactory(); df.setQuery("default", mqlString); PmdPreviewWorker worker = new PmdPreviewWorker(df, "default", 0, limit); worker.run(); if(worker.getException() != null){ worker.getException().printStackTrace(); } TableModel model = worker.getResultTableModel(); int colCount = model.getColumnCount(); int rowCount = model.getRowCount(); String[][] results = new String[rowCount][colCount]; for(int y = 0; y < rowCount; y++ ){ for(int x=0; x < colCount; x++){ results[y][x] = model.getValueAt(y, x).toString(); } } return results; } }; return delegate; } private MQLEditorService getMqlService(MQLEditorServiceDelegate delegate){ MQLEditorServiceImpl mqlService = new MQLEditorServiceImpl(delegate) { @Override public void getPreviewData(MqlQuery query, int page, int limit, XulServiceCallback<String[][]> callback) { callback.success(delegate.getPreviewData(query, page, limit)); } }; return mqlService; } /** * doEditQuery() * Updates (or creates) a query using the PME data source. 
*/ public void doEditQuery() { try { IMetadataDomainRepository repo = getDomainRepo(); MQLEditorServiceDelegate delegate = getMqlServiceDelegate(); SwtMqlEditor editor = new SwtMqlEditor(repo, getMqlService(delegate), delegate){ @Override protected XulLoader getLoader() { SwtXulLoader loader; try { loader = new SwtXulLoader(); loader.registerClassLoader(getClass().getClassLoader()); return loader; } catch (XulException e) { e.printStackTrace(); } return null; } }; String queryString = null; if (df != null && df.getQuery(DEFAULT) != null) { queryString = df.getQuery(DEFAULT); editor.setQuery(queryString); } editor.addOverlay(new DefaultXulOverlay("org/pentaho/agilebi/pdi/wizard/ui/xul/res/mqleditor-overlay.xul")); editor.show(); if (editor.getOkClicked()) { queryString = editor.getQuery(); df.setQuery(DEFAULT, queryString); setCurrentQuery(DEFAULT); } } catch (Exception e) { getDesignTimeContext().userError(e); } } } public DataSourceAndQueryStep() { super(); } /* (non-Javadoc) * @see org.pentaho.reporting.engine.classic.wizard.ui.xul.components.AbstractWizardStep#stepActivating() * * stepActivating() * When this step activates we check to see if we've already been here and if we haven't then we * creates the model 'n'.xmi file from the model. * * If we're coming back in then we just get the current data source and manipulate that. 
*/ public void stepActivating() { super.stepActivating(); if (model != null && df == null) { // Populate a PmdDataFactoryClass for the report definition to use File modelsDir = new File("models"); //$NON-NLS-1$ modelsDir.mkdirs(); int idx = 1; boolean looking = true; String fileName = ""; //$NON-NLS-1$ String modelName = ""; //$NON-NLS-1$ while( looking ) { modelName = "Model "+idx; //$NON-NLS-1$ fileName = "models/"+modelName+".xmi"; //$NON-NLS-1$ //$NON-NLS-2$ modelFile = new File(fileName); if( !modelFile.exists() ) { looking = false; } idx++; } model.setFileName(fileName); model.setModelName(modelName); try { ModelerWorkspaceUtil.autoModelFlat(model); ModelerWorkspaceUtil.saveWorkspace( model, fileName); } catch (ModelerException e1) { getDesignTimeContext().userError(e1); } if (getEditorModel().getReportDefinition().getDataFactory() != null && getEditorModel().getReportDefinition().getDataFactory() instanceof CompoundDataFactory) { CompoundDataFactory cdf = (CompoundDataFactory) getEditorModel().getReportDefinition().getDataFactory(); for (int i=0; i<cdf.size(); i++) { cdf.remove(i); } } df = new PmdDataFactory(); PmdConnectionProvider connectionProvider = new PmdConnectionProvider(); df.setConnectionProvider(connectionProvider); try { df.setXmiFile(modelFile.getCanonicalPath()); } catch (IOException e) { getDesignTimeContext().userError(e); } df.setDomainId(DEFAULT); getEditorModel().getReportDefinition().setDataFactory(df); } else { // editing existing try { df = (PmdDataFactory) getEditorModel().getReportDefinition().getDataFactory(); } catch (ClassCastException e) { df = (PmdDataFactory)((CompoundDataFactory)getEditorModel().getReportDefinition().getDataFactory()).getDataFactoryForQuery(DEFAULT); } } updateGui(); setValid(validateStep()); } /** * updateGui() * * Updates the data source name label and populates the available columns list box. 
*/ private void updateGui() { // Set the data source name XulLabel datasourceLabel = (XulLabel) getDocument().getElementById(DATA_SOURCE_NAME_LABEL_ID); if(datasourceLabel != null && modelFile != null){ datasourceLabel.setValue(modelFile.getName().substring(0, modelFile.getName().lastIndexOf('.'))); } createColumnsList(); } /** * createColumnsList() * * Get all the columns current defined by the query and creates a list of their friendly * names for display in the available columns list box. * * Additionally it removes any names whose source is not the query and then it sorts the * final list. */ private void createColumnsList() { // Set the available query fields; final DataSchemaModel dataSchemaModel = getEditorModel().getDataSchema(); final DataSchema dataSchema = dataSchemaModel.getDataSchema(); final String[] names = dataSchema.getNames(); Arrays.sort(names); ArrayList<String> items = new ArrayList<String>(); if (names != null) { final DefaultDataAttributeContext dataAttributeContext = new DefaultDataAttributeContext(); for ( String name : names ) { final DataAttributes attributes = dataSchema.getAttributes(name); final String source = (String) attributes.getMetaAttribute(MetaAttributeNames.Core.NAMESPACE, MetaAttributeNames.Core.SOURCE, String.class, dataAttributeContext); if ( !source.equals("environment") && !source.equals("parameter") ) { items.add((String) attributes.getMetaAttribute (MetaAttributeNames.Formatting.NAMESPACE, MetaAttributeNames.Formatting.LABEL, String.class, dataAttributeContext)); } } } if (items.size() < 1) { items.add(BaseMessages.getString(EmbeddedWizard.class,"DataSourceAndQueryStep.no_defined_fields")); //$NON-NLS-1$ } setAvailableColumns(items); } /** * @return true if the query can be executed. 
*/ protected boolean validateStep() { // If we have no createdDataFactory and we don't have anything in the model then we can't continue final AbstractReportDefinition reportDefinition = getEditorModel().getReportDefinition(); if (reportDefinition.getDataFactory() == null || StringUtils.isEmpty(reportDefinition.getQuery())) { DebugLog.log("Have no query or no datafactory " + //$NON-NLS-1$ reportDefinition.getDataFactory() + " " + reportDefinition.getQuery()); //$NON-NLS-1$ return false; } // if we have a DataFactory and a query make sure that they are contained in cdf. final String queryName = reportDefinition.getQuery(); if (df == null || df.isQueryExecutable(queryName, new StaticDataRow()) == false) { return false; } try { final AbstractReportDefinition abstractReportDefinition = (AbstractReportDefinition) reportDefinition.derive(); abstractReportDefinition.setDataFactory(df); final DataSchemaModel schemaModel = WizardEditorModel.compileDataSchemaModel(abstractReportDefinition); return schemaModel.isValid(); } catch (Exception ee) { getDesignTimeContext().userError(ee); return false; } } /* (non-Javadoc) * @see org.pentaho.reporting.engine.classic.wizard.ui.xul.components.AbstractWizardStep#createPresentationComponent(org.pentaho.ui.xul.XulDomContainer) * * Loads the overlay for this step and hooks up the event handler */ public void createPresentationComponent(XulDomContainer mainWizardContainer) throws XulException { super.createPresentationComponent(mainWizardContainer); mainWizardContainer.loadOverlay(DATASOURCE_AND_QUERY_STEP_OVERLAY); mainWizardContainer.addEventHandler(new DatasourceAndQueryStepHandler()); } /** * @return the currently defined query */ public String getCurrentQuery() { return getEditorModel().getReportDefinition().getQuery(); } /** * @param currentQuery set the current query to the argument 'currentQuery' and fires * a property change event for objects that have registered. 
*/ public void setCurrentQuery(String currentQuery) { String oldQuery = getCurrentQuery(); getEditorModel().updateQuery(df, DEFAULT); this.firePropertyChange(CURRENT_QUERY_PROPERTY_NAME, oldQuery, currentQuery); this.setValid(validateStep()); updateGui(); } /** * @param availableColumns the availableColumns to set once set it fires and property * change event. */ public void setAvailableColumns(List<String> newValue) { List<String> oldValue = this.availableColumns; this.availableColumns = newValue; this.firePropertyChange(AVAILABLE_COLUMNS_PROPERTY_NAME, oldValue, newValue); } /** * @return the availableColumns */ public List<String> getAvailableColumns() { return availableColumns; } /* (non-Javadoc) * @see org.pentaho.reporting.engine.classic.wizard.ui.xul.components.AbstractWizardStep#setValid(boolean) * * sets the validity of this step. If this is set to true the 'next' and preview button will * be available. */ protected void setValid(final boolean valid) { XulButton nextButton = (XulButton) getDocument().getElementById(NEXT_BTN_ID); nextButton.setDisabled(!valid); } /* (non-Javadoc) * @see org.pentaho.reporting.engine.classic.wizard.ui.xul.components.WizardStep#getStepName() * * returns the internationalized step name that appears in the step list. */ public String getStepName() { return BaseMessages.getString(EmbeddedWizard.class,"DataSourceAndQueryStep.name"); //$NON-NLS-1$ } public void setModel(ModelerWorkspace model) { this.model = model; } /* (non-Javadoc) * @see org.pentaho.reporting.engine.classic.wizard.ui.xul.components.WizardStep#setBindings() * * Binds the available columns property to the query result list. */ public void setBindings() { getBindingFactory().createBinding(this, AVAILABLE_COLUMNS_PROPERTY_NAME, QUERY_RESULT_LIST_ID, ELEMENTS_PROPERTY_NAME); } }
src/org/pentaho/agilebi/pdi/wizard/ui/xul/steps/DataSourceAndQueryStep.java
/* * This program is free software; you can redistribute it and/or modify it under the * terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software * Foundation. * * You should have received a copy of the GNU Lesser General Public License along with this * program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html * or from the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * Copyright (c) 2009 Pentaho Corporation.. All rights reserved. */ package org.pentaho.agilebi.pdi.wizard.ui.xul.steps; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.pentaho.agilebi.pdi.modeler.ModelerException; import org.pentaho.agilebi.pdi.modeler.ModelerWorkspace; import org.pentaho.agilebi.pdi.modeler.ModelerWorkspaceUtil; import org.pentaho.agilebi.pdi.wizard.EmbeddedWizard; import org.pentaho.commons.metadata.mqleditor.editor.SwtMqlEditor; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.metadata.repository.IMetadataDomainRepository; import org.pentaho.reporting.engine.classic.core.AbstractReportDefinition; import org.pentaho.reporting.engine.classic.core.CompoundDataFactory; import org.pentaho.reporting.engine.classic.core.MetaAttributeNames; import org.pentaho.reporting.engine.classic.core.ReportDataFactoryException; import org.pentaho.reporting.engine.classic.core.states.datarow.StaticDataRow; import org.pentaho.reporting.engine.classic.core.wizard.DataAttributes; import org.pentaho.reporting.engine.classic.core.wizard.DataSchema; import org.pentaho.reporting.engine.classic.core.wizard.DataSchemaModel; import 
org.pentaho.reporting.engine.classic.core.wizard.DefaultDataAttributeContext; import org.pentaho.reporting.engine.classic.extensions.datasources.pmd.IPmdConnectionProvider; import org.pentaho.reporting.engine.classic.extensions.datasources.pmd.PmdConnectionProvider; import org.pentaho.reporting.engine.classic.extensions.datasources.pmd.PmdDataFactory; import org.pentaho.reporting.engine.classic.wizard.ui.xul.WizardEditorModel; import org.pentaho.reporting.engine.classic.wizard.ui.xul.components.AbstractWizardStep; import org.pentaho.reporting.libraries.base.util.DebugLog; import org.pentaho.reporting.libraries.base.util.StringUtils; import org.pentaho.ui.xul.XulDomContainer; import org.pentaho.ui.xul.XulException; import org.pentaho.ui.xul.XulLoader; import org.pentaho.ui.xul.XulRunner; import org.pentaho.ui.xul.components.XulButton; import org.pentaho.ui.xul.components.XulLabel; import org.pentaho.ui.xul.impl.AbstractXulEventHandler; import org.pentaho.ui.xul.impl.DefaultXulOverlay; import org.pentaho.ui.xul.swt.SwtXulLoader; import org.pentaho.ui.xul.swt.SwtXulRunner; /** * TODO: Document Me * * @author William Seyler */ public class DataSourceAndQueryStep extends AbstractWizardStep { private static final String DATASOURCE_AND_QUERY_STEP_OVERLAY = "org/pentaho/agilebi/pdi/wizard/ui/xul/res/datasource_and_query_step_Overlay.xul"; //$NON-NLS-1$ private static final String HANDLER_NAME = "datasource_and_query_step_handler"; //$NON-NLS-1$ private static final String CURRENT_QUERY_PROPERTY_NAME = "currentQuery"; //$NON-NLS-1$ private static final String DATA_SOURCE_NAME_LABEL_ID = "data_source_name_label"; //$NON-NLS-1$ private static final String AVAILABLE_COLUMNS_PROPERTY_NAME = "availableColumns"; //$NON-NLS-1$ private static final String ELEMENTS_PROPERTY_NAME = "elements"; //$NON-NLS-1$ private static final String QUERY_RESULT_LIST_ID = "query_result_list"; //$NON-NLS-1$ private static final String NEXT_BTN_ID = "next_btn"; //$NON-NLS-1$ private static final 
String DEFAULT = "default"; //$NON-NLS-1$ private PmdDataFactory df; private ModelerWorkspace model; private File modelFile; private List<String> availableColumns; /** * @author wseyler * DatasourceAndQueryStepHandler * A concrete implementation of AbstractXulEventHandler that defines a name for * itself and contains methods that correspond to onClick and onCommand markups * in the corresponding *.xul file. */ protected class DatasourceAndQueryStepHandler extends AbstractXulEventHandler { public DatasourceAndQueryStepHandler() { } public String getName() { return HANDLER_NAME; } private IMetadataDomainRepository getDomainRepo() throws ReportDataFactoryException { IPmdConnectionProvider connectionProvider = ((PmdDataFactory) getEditorModel().getReportDefinition().getDataFactory()).getConnectionProvider(); IMetadataDomainRepository repo = connectionProvider.getMetadataDomainRepository(DEFAULT, getEditorModel().getReportDefinition().getResourceManager(), getEditorModel().getReportDefinition().getContentBase(), df.getXmiFile()); return repo; } /** * doEditQuery() * Updates (or creates) a query using the PME data source. 
*/ public void doEditQuery() { try { IMetadataDomainRepository repo = getDomainRepo(); SwtMqlEditor editor = new SwtMqlEditor(repo){ @Override protected XulLoader getLoader() { SwtXulLoader loader; try { loader = new SwtXulLoader(); loader.registerClassLoader(getClass().getClassLoader()); return loader; } catch (XulException e) { e.printStackTrace(); } return null; } }; String queryString = null; if (df != null && df.getQuery(DEFAULT) != null) { queryString = df.getQuery(DEFAULT); editor.setQuery(queryString); } editor.addOverlay(new DefaultXulOverlay("org/pentaho/agilebi/pdi/wizard/ui/xul/res/mqleditor-overlay.xul")); editor.show(); if (editor.getOkClicked()) { queryString = editor.getQuery(); df.setQuery(DEFAULT, queryString); setCurrentQuery(DEFAULT); } } catch (Exception e) { getDesignTimeContext().userError(e); } } } public DataSourceAndQueryStep() { super(); } /* (non-Javadoc) * @see org.pentaho.reporting.engine.classic.wizard.ui.xul.components.AbstractWizardStep#stepActivating() * * stepActivating() * When this step activates we check to see if we've already been here and if we haven't then we * creates the model 'n'.xmi file from the model. * * If we're coming back in then we just get the current data source and manipulate that. 
*/ public void stepActivating() { super.stepActivating(); if (model != null && df == null) { // Populate a PmdDataFactoryClass for the report definition to use File modelsDir = new File("models"); //$NON-NLS-1$ modelsDir.mkdirs(); int idx = 1; boolean looking = true; String fileName = ""; //$NON-NLS-1$ String modelName = ""; //$NON-NLS-1$ while( looking ) { modelName = "Model "+idx; //$NON-NLS-1$ fileName = "models/"+modelName+".xmi"; //$NON-NLS-1$ //$NON-NLS-2$ modelFile = new File(fileName); if( !modelFile.exists() ) { looking = false; } idx++; } model.setFileName(fileName); model.setModelName(modelName); try { ModelerWorkspaceUtil.autoModelFlat(model); ModelerWorkspaceUtil.saveWorkspace( model, fileName); } catch (ModelerException e1) { getDesignTimeContext().userError(e1); } if (getEditorModel().getReportDefinition().getDataFactory() != null && getEditorModel().getReportDefinition().getDataFactory() instanceof CompoundDataFactory) { CompoundDataFactory cdf = (CompoundDataFactory) getEditorModel().getReportDefinition().getDataFactory(); for (int i=0; i<cdf.size(); i++) { cdf.remove(i); } } df = new PmdDataFactory(); PmdConnectionProvider connectionProvider = new PmdConnectionProvider(); df.setConnectionProvider(connectionProvider); try { df.setXmiFile(modelFile.getCanonicalPath()); } catch (IOException e) { getDesignTimeContext().userError(e); } df.setDomainId(DEFAULT); getEditorModel().getReportDefinition().setDataFactory(df); } else { // editing existing try { df = (PmdDataFactory) getEditorModel().getReportDefinition().getDataFactory(); } catch (ClassCastException e) { df = (PmdDataFactory)((CompoundDataFactory)getEditorModel().getReportDefinition().getDataFactory()).getDataFactoryForQuery(DEFAULT); } } updateGui(); setValid(validateStep()); } /** * updateGui() * * Updates the data source name label and populates the available columns list box. 
*/ private void updateGui() { // Set the data source name XulLabel datasourceLabel = (XulLabel) getDocument().getElementById(DATA_SOURCE_NAME_LABEL_ID); if(datasourceLabel != null && modelFile != null){ datasourceLabel.setValue(modelFile.getName().substring(0, modelFile.getName().lastIndexOf('.'))); } createColumnsList(); } /** * createColumnsList() * * Get all the columns current defined by the query and creates a list of their friendly * names for display in the available columns list box. * * Additionally it removes any names whose source is not the query and then it sorts the * final list. */ private void createColumnsList() { // Set the available query fields; final DataSchemaModel dataSchemaModel = getEditorModel().getDataSchema(); final DataSchema dataSchema = dataSchemaModel.getDataSchema(); final String[] names = dataSchema.getNames(); Arrays.sort(names); ArrayList<String> items = new ArrayList<String>(); if (names != null) { final DefaultDataAttributeContext dataAttributeContext = new DefaultDataAttributeContext(); for ( String name : names ) { final DataAttributes attributes = dataSchema.getAttributes(name); final String source = (String) attributes.getMetaAttribute(MetaAttributeNames.Core.NAMESPACE, MetaAttributeNames.Core.SOURCE, String.class, dataAttributeContext); if ( !source.equals("environment") && !source.equals("parameter") ) { items.add((String) attributes.getMetaAttribute (MetaAttributeNames.Formatting.NAMESPACE, MetaAttributeNames.Formatting.LABEL, String.class, dataAttributeContext)); } } } if (items.size() < 1) { items.add(BaseMessages.getString(EmbeddedWizard.class,"DataSourceAndQueryStep.no_defined_fields")); //$NON-NLS-1$ } setAvailableColumns(items); } /** * @return true if the query can be executed. 
*/ protected boolean validateStep() { // If we have no createdDataFactory and we don't have anything in the model then we can't continue final AbstractReportDefinition reportDefinition = getEditorModel().getReportDefinition(); if (reportDefinition.getDataFactory() == null || StringUtils.isEmpty(reportDefinition.getQuery())) { DebugLog.log("Have no query or no datafactory " + //$NON-NLS-1$ reportDefinition.getDataFactory() + " " + reportDefinition.getQuery()); //$NON-NLS-1$ return false; } // if we have a DataFactory and a query make sure that they are contained in cdf. final String queryName = reportDefinition.getQuery(); if (df == null || df.isQueryExecutable(queryName, new StaticDataRow()) == false) { return false; } try { final AbstractReportDefinition abstractReportDefinition = (AbstractReportDefinition) reportDefinition.derive(); abstractReportDefinition.setDataFactory(df); final DataSchemaModel schemaModel = WizardEditorModel.compileDataSchemaModel(abstractReportDefinition); return schemaModel.isValid(); } catch (Exception ee) { getDesignTimeContext().userError(ee); return false; } } /* (non-Javadoc) * @see org.pentaho.reporting.engine.classic.wizard.ui.xul.components.AbstractWizardStep#createPresentationComponent(org.pentaho.ui.xul.XulDomContainer) * * Loads the overlay for this step and hooks up the event handler */ public void createPresentationComponent(XulDomContainer mainWizardContainer) throws XulException { super.createPresentationComponent(mainWizardContainer); mainWizardContainer.loadOverlay(DATASOURCE_AND_QUERY_STEP_OVERLAY); mainWizardContainer.addEventHandler(new DatasourceAndQueryStepHandler()); } /** * @return the currently defined query */ public String getCurrentQuery() { return getEditorModel().getReportDefinition().getQuery(); } /** * @param currentQuery set the current query to the argument 'currentQuery' and fires * a property change event for objects that have registered. 
*/ public void setCurrentQuery(String currentQuery) { String oldQuery = getCurrentQuery(); getEditorModel().updateQuery(df, DEFAULT); this.firePropertyChange(CURRENT_QUERY_PROPERTY_NAME, oldQuery, currentQuery); this.setValid(validateStep()); updateGui(); } /** * @param availableColumns the availableColumns to set once set it fires and property * change event. */ public void setAvailableColumns(List<String> newValue) { List<String> oldValue = this.availableColumns; this.availableColumns = newValue; this.firePropertyChange(AVAILABLE_COLUMNS_PROPERTY_NAME, oldValue, newValue); } /** * @return the availableColumns */ public List<String> getAvailableColumns() { return availableColumns; } /* (non-Javadoc) * @see org.pentaho.reporting.engine.classic.wizard.ui.xul.components.AbstractWizardStep#setValid(boolean) * * sets the validity of this step. If this is set to true the 'next' and preview button will * be available. */ protected void setValid(final boolean valid) { XulButton nextButton = (XulButton) getDocument().getElementById(NEXT_BTN_ID); nextButton.setDisabled(!valid); } /* (non-Javadoc) * @see org.pentaho.reporting.engine.classic.wizard.ui.xul.components.WizardStep#getStepName() * * returns the internationalized step name that appears in the step list. */ public String getStepName() { return BaseMessages.getString(EmbeddedWizard.class,"DataSourceAndQueryStep.name"); //$NON-NLS-1$ } public void setModel(ModelerWorkspace model) { this.model = model; } /* (non-Javadoc) * @see org.pentaho.reporting.engine.classic.wizard.ui.xul.components.WizardStep#setBindings() * * Binds the available columns property to the query result list. */ public void setBindings() { getBindingFactory().createBinding(this, AVAILABLE_COLUMNS_PROPERTY_NAME, QUERY_RESULT_LIST_ID, ELEMENTS_PROPERTY_NAME); } }
[AGILEBI-328] Enabled previewing of queries.
src/org/pentaho/agilebi/pdi/wizard/ui/xul/steps/DataSourceAndQueryStep.java
[AGILEBI-328] Enabled previewing of queries.
<ide><path>rc/org/pentaho/agilebi/pdi/wizard/ui/xul/steps/DataSourceAndQueryStep.java <ide> import java.util.Arrays; <ide> import java.util.List; <ide> <add>import javax.swing.table.TableModel; <add> <ide> import org.pentaho.agilebi.pdi.modeler.ModelerException; <ide> import org.pentaho.agilebi.pdi.modeler.ModelerWorkspace; <ide> import org.pentaho.agilebi.pdi.modeler.ModelerWorkspaceUtil; <ide> import org.pentaho.agilebi.pdi.wizard.EmbeddedWizard; <add>import org.pentaho.commons.metadata.mqleditor.MqlQuery; <add>import org.pentaho.commons.metadata.mqleditor.editor.MQLEditorService; <ide> import org.pentaho.commons.metadata.mqleditor.editor.SwtMqlEditor; <add>import org.pentaho.commons.metadata.mqleditor.editor.service.MQLEditorServiceImpl; <add>import org.pentaho.commons.metadata.mqleditor.editor.service.util.MQLEditorServiceDelegate; <ide> import org.pentaho.di.i18n.BaseMessages; <add>import org.pentaho.metadata.query.model.util.QueryXmlHelper; <ide> import org.pentaho.metadata.repository.IMetadataDomainRepository; <ide> import org.pentaho.reporting.engine.classic.core.AbstractReportDefinition; <ide> import org.pentaho.reporting.engine.classic.core.CompoundDataFactory; <ide> import org.pentaho.reporting.engine.classic.core.MetaAttributeNames; <add>import org.pentaho.reporting.engine.classic.core.MetaTableModel; <ide> import org.pentaho.reporting.engine.classic.core.ReportDataFactoryException; <ide> import org.pentaho.reporting.engine.classic.core.states.datarow.StaticDataRow; <ide> import org.pentaho.reporting.engine.classic.core.wizard.DataAttributes; <ide> import org.pentaho.reporting.engine.classic.wizard.ui.xul.components.AbstractWizardStep; <ide> import org.pentaho.reporting.libraries.base.util.DebugLog; <ide> import org.pentaho.reporting.libraries.base.util.StringUtils; <add>import org.pentaho.reporting.ui.datasources.pmd.PmdPreviewWorker; <ide> import org.pentaho.ui.xul.XulDomContainer; <ide> import org.pentaho.ui.xul.XulException; <ide> import 
org.pentaho.ui.xul.XulLoader; <del>import org.pentaho.ui.xul.XulRunner; <add>import org.pentaho.ui.xul.XulServiceCallback; <ide> import org.pentaho.ui.xul.components.XulButton; <ide> import org.pentaho.ui.xul.components.XulLabel; <ide> import org.pentaho.ui.xul.impl.AbstractXulEventHandler; <ide> import org.pentaho.ui.xul.impl.DefaultXulOverlay; <ide> import org.pentaho.ui.xul.swt.SwtXulLoader; <del>import org.pentaho.ui.xul.swt.SwtXulRunner; <ide> /** <ide> * TODO: Document Me <ide> * <ide> { <ide> public DatasourceAndQueryStepHandler() <ide> { <add> <add> <ide> } <ide> <ide> public String getName() <ide> IMetadataDomainRepository repo = connectionProvider.getMetadataDomainRepository(DEFAULT, getEditorModel().getReportDefinition().getResourceManager(), getEditorModel().getReportDefinition().getContentBase(), df.getXmiFile()); <ide> <ide> return repo; <add> } <add> <add> <add> private MQLEditorServiceDelegate getMqlServiceDelegate() throws ReportDataFactoryException{ <add> <add> MQLEditorServiceDelegate delegate = new MQLEditorServiceDelegate(getDomainRepo()) { <add> @Override <add> public String[][] getPreviewData(MqlQuery query, int page, int limit) { <add> org.pentaho.metadata.query.model.Query mqlQuery = convertQueryModel(query); <add> String mqlString = new QueryXmlHelper().toXML(mqlQuery); <add> <add> <add> PmdDataFactory df = (PmdDataFactory) getEditorModel().getReportDefinition().getDataFactory(); <add> df.setQuery("default", mqlString); <add> <add> PmdPreviewWorker worker = new PmdPreviewWorker(df, "default", 0, limit); <add> worker.run(); <add> if(worker.getException() != null){ <add> worker.getException().printStackTrace(); <add> } <add> TableModel model = worker.getResultTableModel(); <add> int colCount = model.getColumnCount(); <add> int rowCount = model.getRowCount(); <add> String[][] results = new String[rowCount][colCount]; <add> for(int y = 0; y < rowCount; y++ ){ <add> for(int x=0; x < colCount; x++){ <add> results[y][x] = model.getValueAt(y, 
x).toString(); <add> } <add> } <add> return results; <add> } <add> }; <add> return delegate; <add> } <add> <add> private MQLEditorService getMqlService(MQLEditorServiceDelegate delegate){ <add> <add> MQLEditorServiceImpl mqlService = new MQLEditorServiceImpl(delegate) { <add> @Override <add> public void getPreviewData(MqlQuery query, int page, int limit, XulServiceCallback<String[][]> callback) { <add> callback.success(delegate.getPreviewData(query, page, limit)); <add> } <add> }; <add> return mqlService; <ide> } <ide> <ide> /** <ide> <ide> IMetadataDomainRepository repo = getDomainRepo(); <ide> <del> SwtMqlEditor editor = new SwtMqlEditor(repo){ <add> <add> MQLEditorServiceDelegate delegate = getMqlServiceDelegate(); <add> <add> SwtMqlEditor editor = new SwtMqlEditor(repo, getMqlService(delegate), delegate){ <ide> <ide> @Override <ide> protected XulLoader getLoader() {
JavaScript
mit
41f9f091df9beca7afe12cddb28d62784f0fb88b
0
oratory/wago.io,oratory/wago.io
const battlenet = require('./battlenet') const cloudflare = require('cloudflare')({token: config.cloudflare.dnsToken}) const decompress = require('@atomic-reactor/decompress') const image = require('./image') const lua = require('./lua') const md5 = require('md5') const mkdirp = require('mkdirp') const path = require('path') const updateDataCaches = require('../../middlewares/updateLocalCache') const getCode = require('./code-detection/get-code') const luacheck = require('./luacheck') const codeMetrics = require('./codeMetrics') const ENUM = require('../../middlewares/enum') const logger = require('../../middlewares/matomo') const logError = require('../../middlewares/matomoErrors') module.exports = async (task, data) => { try { switch (task) { case 'UpdatePatreonAccounts': return await UpdatePatreonAccounts() case 'UpdateWeeklyMDT': return await UpdateWeeklyMDT() case 'ComputeStatistics': return await ComputeStatistics() case 'DiscordMessage': return await DiscordMessage(data) case 'UpdateValidCharacters': return await UpdateValidCharacters() case 'UpdateGuildMembership': return await UpdateGuildMembership() case 'UpdateLatestAddonReleases': return await UpdateLatestAddonReleases() case 'UpdateTopLists': return await UpdateTopLists() case 'UpdateTwitchStatus': return await UpdateTwitchStatus(data) case 'UpdateWagoOfTheMoment': return await UpdateWagoOfTheMoment() case 'UpdateActiveUserCount': return await UpdateActiveUserCount() case 'UpdateLatestNews': return await UpdateLatestNews() case 'SyncElastic': return await SyncElastic(data.table) case 'SyncMeili': return await SyncMeili(data.table) case 'ProcessCode': return await ProcessCode(data) case 'ProcessAllCode': return await ProcessAllCode() case 'CleanTaskQueue': return taskQueue.clean(10000) default: throw {name: 'Unknown task', message: 'Unknown task ' + task} } } catch (e) { console.log(e) logError(e, 'Task ', task) } } async function UpdateWagoOfTheMoment () { const data = await 
WagoItem.randomOfTheMoment() await SiteData.findOneAndUpdate({_id: 'WagoOfTheMoment'}, {value: data}, {upsert: true}).exec() await updateDataCaches.queue('WagoOfTheMoment') } async function UpdateTwitchStatus (channel) { var twitchToken = await redis.get('twitch:appToken') if (!twitchToken) { const getToken = await axios.post(`https://id.twitch.tv/oauth2/token?client_id=${config.auth.twitch.clientID}&client_secret=${config.auth.twitch.clientSecret}&grant_type=client_credentials`) if (getToken && getToken.data && getToken.data.access_token) { twitchToken = getToken.data.access_token redis.set('twitch:appToken', twitchToken, 'EX', getToken.data.expires_in) } } var streams = [] var status = {} if (!channel || typeof channel !== 'string') { const cfg = await SiteData.get('EmbeddedStream') streams = cfg.streams } for (let i = 0; i < streams.length; i++) { let channel = streams[i].channel const req = await axios.get(`https://api.twitch.tv/helix/streams?user_login=${channel}`, { headers: { 'client-id': config.auth.twitch.clientID, 'Authorization': 'Bearer '+ twitchToken } }) await redis.set(`twitch:${channel}:live`, (req.data.data.length > 0)) status[channel] = (req.data.data.length > 0) } const streamers = await Streamer.find({}) var getStreams = [] for (let i = 0; i < streamers.length; i++) { getStreams.push(`user_login=${streamers[i].name}&`) } var twitchStreamers = [] while (getStreams.length) { let twitchUserQuery = getStreams.splice(0, 20) let twitchReq = await axios.get(`https://api.twitch.tv/helix/streams?${twitchUserQuery.join('')}`, { headers: { 'client-id': config.auth.twitch.clientID, 'Authorization': 'Bearer '+ twitchToken } }) if (twitchReq && twitchReq.data && twitchReq.data.data) { twitchStreamers = twitchStreamers.concat(twitchReq.data.data) } } for (let i = 0; i < streamers.length; i++) { for (let k = 0; k < twitchStreamers.length; k++) { if (twitchStreamers[k].user_name.toLowerCase() === streamers[i].name.toLowerCase()) { streamers[i].online = new 
Date(twitchStreamers[k].started_at) streamers[i].offline = null streamers[i].game = twitchStreamers[k].game_name streamers[i].title = twitchStreamers[k].title streamers[i].viewers = twitchStreamers[k].viewer_count - (streamers[i].wagoViewers || 0) streamers[i].name = twitchStreamers[k].user_name await streamers[i].save() streamers[i].ok = true await redis.set(`twitch:${streamers[i].name}:live`, 1) } } } for (let i = 0; i < streamers.length; i++) { if (!streamers[i].ok && streamers[i].online) { streamers[i].online = null streamers[i].offline = Date.now() streamers[i].viewers = 0 streamers[i].wagoViewers = 0 await streamers[i].save() await redis.del(`twitch:${streamers[i].name}:live`) await redis2.zremrangebyscore(`allEmbeds:${streamers[i].name}`, '-inf', '+inf') } } return status } async function UpdateLatestNews () { const docs = await Blog.find({publishStatus: 'publish'}).sort('-date').limit(1).populate('_userId') var news = [] docs.forEach((item) => { news.push({ content: item.content, date: item.date, format: item.format, title: item.title, _id: item._id, user: { username: item._userId.account.username, css: item._userId.roleclass } }) }) await SiteData.findOneAndUpdate({_id: 'LatestNews'}, {value: news}, {upsert: true}).exec() await updateDataCaches.queue('LatestNews') } async function UpdatePatreonAccounts () { nextURL = 'https://www.patreon.com/api/oauth2/v2/campaigns/562591/members?include=currently_entitled_tiers,user&fields%5Btier%5D=title' while (nextURL) { var response = await axios.get(nextURL, {headers: {Authorization: 'Bearer '+ config.auth.patreon.creatorToken}}) var patrons = response.data.data for (let i = 0; i < patrons.length; i++) { if (!patrons[i] || !patrons[i].relationships || !patrons[i].relationships.user || !patrons[i].relationships.user.data || !patrons[i].relationships.user.data.id) { continue } var user = await User.findOne({"patreon.id": patrons[i].relationships.user.data.id}) if (!user) { continue } var tier try { tier = 
patrons[i].relationships.currently_entitled_tiers.data[0].id } catch (e) { tier = 0 } // subscriber 1385924 // gold sub 1386010 user.roles.subscriber = tier > 0 user.roles.gold_subscriber = tier > 1385924 // user.roles.guild_subscriber = (!patrons[i].attributes.declined_since && patrons[i].attributes.amount_cents >= 1500) await user.save() } if (response.data.links && response.data.links.next) { nextURL = response.data.links.next } else { nextURL = null } } return } async function UpdateWeeklyMDT () { await battlenet.updateMDTWeeks() } async function UpdateTopLists () { var data = [] // favorites var imports = await WagoItem.find({deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorites All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite WeakAuras All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'CLASSIC-WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite Classic WeakAuras All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'TBC-WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: 
false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite TBC WeakAuras All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'PLATER', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite Plater All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'TOTALRP3', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite Total RP All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'VUHDO', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite VuhDo All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'ELVUI', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite ElvUI All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }), lastOfSection: true }) // popular imports = await WagoItem.find({deleted: 
false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular WeakAuras This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'CLASSIC-WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular Classic WeakAuras This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'TBC-WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular TBC WeakAuras This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'PLATER', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular Plater This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) 
}) imports = await WagoItem.find({type: 'TOTALRP3', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular Total RP This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'VUHDO', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular VuhDo This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'ELVUI', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular ElvUI This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }), lastOfSection: true }) // installed imports = await WagoItem.find({deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.installed_count").select('_id name popularity.installed_count').limit(15).exec() data.push({title: 'Installed', imports: imports.map(x => { return {count: x.popularity.installed_count, display: '[-count-] install', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.installed_count").select('_id name popularity.installed_count').limit(15).exec() data.push({title: 'Installed WeakAuras', imports: imports.map(x => { return {count: x.popularity.installed_count, display: 
'[-count-] install', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'CLASSIC-WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.installed_count").select('_id name popularity.installed_count').limit(15).exec() data.push({title: 'Installed Classic WeakAuras', imports: imports.map(x => { return {count: x.popularity.installed_count, display: '[-count-] install', name: x.name, slug: x.slug} }), lastOfSection: true }) imports = await WagoItem.find({type: 'TBC-WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.installed_count").select('_id name popularity.installed_count').limit(15).exec() data.push({title: 'Installed TBC WeakAuras', imports: imports.map(x => { return {count: x.popularity.installed_count, display: '[-count-] install', name: x.name, slug: x.slug} }), lastOfSection: true }) // new and updated imports imports = await WagoItem.find({deleted: false, hidden: false, private: false, restricted: false, encrypted: false, $where: "this.created.getTime() != this.modified.getTime()"}).sort({"modified": -1}).select('_id name modified').limit(15).exec() data.push({title: 'Recently Updated', imports: imports.map(x => { return {date: true, display: x.modified, name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({deleted: false, hidden: false, private: false, restricted: false, encrypted: false, $where: "this.created.getTime() == this.modified.getTime()"}).sort({"created": -1}).select('_id name created').limit(15).exec() data.push({title: 'Newest Imports', imports: imports.map(x => { return {date: true, display: x.created, name: x.name, slug: x.slug} }) }) // save data await SiteData.findOneAndUpdate({_id: 'TopLists'}, {value: data}, {upsert: true}).exec() await updateDataCaches.queue('TopLists') } async function DiscordMessage (data) { if (global.discordBot) { const author = await User.findById(data.author) const 
wago = await WagoItem.lookup(data.wago) if (data.type === 'comment') { const sendTo = await User.findOne({_id: data.to, "discord.options.messageOnComment": true}).select('discord').exec() if (sendTo && !author._id.equals(sendTo._id)) { discordBot.postComment(author, sendTo, wago, data.message) } } else if (data.type === 'update') { const stars = await WagoFavorites.find({type: 'Star', wagoID: wago._id}) for (let i = 0; i < stars.length; i++) { const sendTo = await User.findOne({_id: stars[i].userID, "discord.options.messageOnFaveUpdate": true}).select('discord').exec() if (sendTo && !author._id.equals(sendTo._id)) { discordBot.postUpdate(author, sendTo, wago) } } } } } async function UpdateValidCharacters () { const fourWeeksAgo = new Date() fourWeeksAgo.setDate(fourWeeksAgo.getDate() - 28) const users = await User.find({"battlenet.characters.1": {$exists: true}, $or: [{"battlenet.updateDate": {$exists: false}}, {"battlenet.updateDate": {$lt: fourWeeksAgo}}]}).limit(50).exec() for (let i = 0; i < users.length; i++) { var validChars = [] for (let k = 0; k < users[i].battlenet.characters.length; k++) { const status = await battlenet.lookupCharacterStatus(users[i].battlenet.characters[k].region, users[i].battlenet.characters[k].realm, users[i].battlenet.characters[k].name) if (status.error || !status.is_valid || (users[i].battlenet.characters[k].bnetID && users[i].battlenet.characters[k].bnetID != status.id)) { continue } else if (!users[i].battlenet.characters[k].bnetID) { users[i].battlenet.characters[k].bnetID = status.id } validChars.push(users[i].battlenet.characters[k]) } users[i].battlenet.updateDate = new Date() users[i].battlenet.characters = validChars await users[i].save() } } async function UpdateGuildMembership () { function guildRankSort(a, b) { if (a.rank > b.rank) return -1 else if (a.rank < b.rank) return 1 return 0 } function escapeRegExp(string) { return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string } var 
guildsChecked = [] const users = await User.find({"battlenet.guilds.1": {$exists: true}, $or: [{"roles.gold_subscriber": true}, {"roles.pro_subscriber": true}, {"roles.ambassador": true}, {"roles.developer": true}, {"roles.community_leader": true}, {"roles.artContestWinnerAug2018": true}]}).exec() const updateGuild = async function (guildKey) { const accountIdsInGuild = [] const accountsInGuild = [] if (guildKey.match(/@\d$/, '') || guildsChecked.indexOf(guildKey) >= 0) { return Promise.resolve() } guildsChecked.push(guildKey) const [region, realm, guildname] = guildKey.split(/@/g) const guild = await battlenet.lookupGuild(region, realm, guildname) if (!guild || !guild.members) { // if unknown error (likely 500) return Promise.resolve() } else if (guild.error === 'NOGUILD') { // if this guild no longer exists, remove all members from it let exGuild = await User.find({"battlenet.guilds": guildKey}).exec() let deletePromise = new Promise(async (deleteDone) => { exGuild.forEach(async (exMember) => { let re = new RegExp('^' + guildKey + '(@\\d)?$') for (let g = exMember.battlenet.guilds.length - 1; g >= 0; g--) { if (exMember.battlenet.guilds[g].match(re)) { exMember.battlenet.guilds.splice(g, 1) } } await exMember.save() return deleteDone() }) }) await deletePromise return Promise.resolve() } else { // guild found! 
Match all wago users with guild guild.members.sort(guildRankSort) for (let j = 0; j < guild.members.length; j++) { let memberUser = await User.findOne({"battlenet.characters.region": region, "battlenet.characters.name": guild.members[j].character.name}) if (!memberUser) { continue } var memberID = memberUser._id.toString() if (accountsInGuild.indexOf(memberID) === -1) { accountIdsInGuild.push(memberUser._id) accountsInGuild.push(memberID) } // if new member to guild if (memberUser.battlenet.guilds.indexOf(guildKey) === -1) { memberUser.battlenet.guilds.push(guildKey) for (let k = guild.members[j].rank; k <= 9; k++) { memberUser.battlenet.guilds.push(`${guildKey}@${k}`) } } // else they are already in guild, but since they may have changed ranks // remove everything and re-add all current ranks else { let re = new RegExp('^' + escapeRegExp(guildKey) + '@\\d$') for (let g = 0; g < memberUser.battlenet.guilds.length; g++) { if (memberUser.battlenet.guilds[g].match(re)) { memberUser.battlenet.guilds.splice(g, 1) break } } for (let k = guild.members[j].rank; k <= 9; k++) { memberUser.battlenet.guilds.push(`${guildKey}@${k}`) } } if (guildKey === 'eu@twisting-nether@Method') { memberUser.roles.methodRaider = (guild.members[j].rank <= 4) } memberUser.battlenet.guilds = [...new Set(memberUser.battlenet.guilds)] await memberUser.save() } // remove old members let exGuild = await User.find({"battlenet.guilds": guildKey, _id: {$nin: accountIdsInGuild}}).exec() for (let d = 0; d < exGuild.length; d++) { let re = new RegExp('^' + guildKey + '(@\\d)?$') for (let g = exGuild[d].battlenet.guilds.length - 1; g >= 0; g--) { if (exGuild[d].battlenet.guilds[g].match(re)) { exGuild[d].battlenet.guilds.splice(g, 1) } } if (guildKey === 'eu@twisting-nether@Method') { exGuild[d].roles.methodRaider = false } await exGuild[d].save() } } } for (let i = 0; i < users.length; i++) { for (let j = 0; j < users[i].battlenet.guilds.length; j++) { await updateGuild(users[i].battlenet.guilds[j]) } } 
await updateGuild('eu@twisting-nether@Method') } async function ComputeStatistics () { // calc views this week const viewedDocs = await ViewsThisWeek.aggregate([{$group: { _id: '$wagoID', views: { $sum: 1 }}}]).exec() let totalImports = 0 let totalSum = 0 let totalSquared = 0 while (viewedDocs.length > 0) { // process in batches of 500 let items = viewedDocs.splice(0, 500) let ops = [] items.forEach((wago) => { ops.push({ updateOne: { filter: {_id: wago._id}, update: {'popularity.viewsThisWeek': wago.views} } }) if (wago.views > 5) { totalImports++ totalSum += wago.views totalSquared += wago.views * wago.views } }) await WagoItem.bulkWrite(ops) } let mean = totalSum / totalImports let standardDeviation = Math.sqrt((totalSquared - ((totalSum * totalSum) / totalImports)) / (totalSum - 1)) await redis.set('stats:standardDeviation:views', standardDeviation || 0) await redis.set('stats:mean:views', mean || 0) const recentDate = new Date() recentDate.setMonth(recentDate.getDate() - 18) // calc installs this month totalImports = 0 totalSum = 0 totalSquared = 0 const installDocs = await WagoFavorites.aggregate([ {$match: {type: 'Install', timestamp: {$gt: recentDate}}}, {$group: { _id: '$wagoID', installs: { $sum: 1 }}} ]).exec() while (installDocs.length > 0) { // process in batches of 500 var items = installDocs.splice(0, 500) items.forEach((wago) => { if (wago.installs > 5) { totalImports++ totalSum += wago.installs totalSquared += wago.installs * wago.installs } }) } mean = totalSum / totalImports standardDeviation = Math.sqrt((totalSquared - ((totalSum * totalSum) / totalImports)) / (totalSum - 1)) await redis.set('stats:standardDeviation:installs', standardDeviation || 0) await redis.set('stats:mean:installs', mean || 0) // calc stars this month totalImports = 0 totalSum = 0 totalSquared = 0 const starDocs = await WagoFavorites.aggregate([ {$match: {type: 'Star', timestamp: {$gt: recentDate}}}, {$group: { _id: '$wagoID', stars: { $sum: 1 }}} ]).exec() while 
(starDocs.length > 0) { // process in batches of 500 var items = starDocs.splice(0, 500) items.forEach((wago) => { if (wago.stars > 5) { totalImports++ totalSum += wago.stars totalSquared += wago.stars * wago.stars } }) } mean = totalSum / totalImports standardDeviation = Math.sqrt((totalSquared - ((totalSum * totalSum) / totalImports)) / (totalSum - 1)) await redis.set('stats:standardDeviation:stars', standardDeviation || 0) await redis.set('stats:mean:stars', mean || 0) } async function UpdateLatestAddonReleases () { const addons = [ {name: 'WeakAuras-2', host: 'github', url: 'https://api.github.com/repos/weakAuras/WeakAuras2/releases'}, {name: 'VuhDo', host: 'gitlab', url: 'https://gitlab.vuhdo.io/api/v4/projects/13/releases'}, {name: 'ElvUI', host: 'tukui', url: 'https://www.tukui.org/api.php?ui=elvui'}, {name: 'MDT', host: 'github', url: 'https://api.github.com/repos/Nnoggie/MythicDungeonTools/releases'}, ] var madeUpdate = false for (let addon of addons) { var release = {} try { const response = await axios.get(addon.url) if (addon.host === 'github') { var uniquePhases = {} for (const item of response.data) { release = {} release.addon = addon.name release.active = true release.phase = item.prerelease ? 
'Pre-Release' : 'Release' if (uniquePhases[addon.name + release.phase]) { continue } uniquePhases[addon.name + release.phase] = true release.url = item.url release.version = item.name release.date = item.published_at const preExisting = await AddonRelease.findOneAndUpdate({addon: release.addon, phase: release.phase, version: release.version}, release, {upsert: true, new: false}).exec() if (!preExisting) { madeUpdate = true await AddonRelease.updateMany({addon: release.addon, version: {$ne: release.version}}, {$set: {active: false}}).exec() if (release.addon === 'WeakAuras-2' && release.phase === 'Release') { await updateWAData(release, item.assets) } else if (release.addon === 'MDT' && release.phase === 'Release') { await updateMDTData(release, item) } } } } else if (addon.host === 'gitlab') { release.addon = addon.name release.active = true release.phase = 'Release' for (let i = 0; i < response.data.length; i++) { if (response.data[i].name.match(/^v[\d.-]+$/)) { release.version = response.data[i].name release.url = 'https://gitlab.vuhdo.io/vuhdo/vuhdo/-/releases' + release.version release.date = response.data[i].released_at break } } if (!release.url) { return } const preExisting = await AddonRelease.findOneAndUpdate({addon: release.addon, phase: release.phase, version: release.version}, release, {upsert: true, new: false}).exec() if (!preExisting) { madeUpdate = true await AddonRelease.updateMany({addon: release.addon, version: {$ne: release.version}}, {$set: {active: false}}).exec() } } else if (addon.host === 'tukui') { release.addon = addon.name release.active = true release.phase = 'Release' release.url = response.data.web_url release.version = response.data.version release.date = new Date(response.data.lastupdate) if (addon.name === 'ElvUI') { var classicResponse = await axios.get('https://www.tukui.org/api.php?classic-addon=2') release.classicVersion = classicResponse.data.version } const preExisting = await AddonRelease.findOneAndUpdate({addon: 
release.addon, phase: release.phase, version: release.version, classicVersion: release.classicVersion}, release, {upsert: true, new: false}).exec() if (!preExisting) { // if a new release then de-activate the previous version(s) madeUpdate = true await AddonRelease.updateMany({addon: release.addon, phase: release.phase, version: {$ne: release.version}}, {$set: {active: false}}).exec() } } } catch (e) { console.log(e) throw 'Error fetching addon ' + addon.name } } if (madeUpdate) { const Latest = await AddonRelease.find({active: true}) await SiteData.set('LatestAddons', Latest) await updateDataCaches.queue('LatestAddons') } } async function SyncElastic(table) { console.log("SYNC ELASTIC", table) return await new Promise(async (done, reject) => { let count = 0 let doc switch (table){ case 'import': const cursorImports = WagoItem.find({_userId: {$exists: true}, expires_at: null}).cursor() doc = await cursorImports.next() while (doc) { count++ if (doc.deleted) { elastic.removeDoc('import', await doc._id) } else { elastic.addDoc('import', await doc.indexedImportData, true) } doc = await cursorImports.next() if(count%500 === 0) { console.log(table, count) } } break case 'code': const cursorCode = WagoItem.find({_userId: {$exists: true}, expires_at: null, hasCustomCode: true}).cursor() doc = await cursorCode.next() while (doc && count < 300000) { count++ let code = await doc.indexedCodeData if (code) { if (doc.deleted) { elastic.removeDoc('code', await doc._id) } else { elastic.addDoc('code', code, true) } } else { elastic.removeDoc('code', await doc._id) } doc = await cursorCode.next() if(count%500 === 0) { console.log(table, count) } } break case 'WagoItem': syncStream = WagoItem.synchronize() break case 'User': syncStream = User.synchronize() break default: return done() } }) } async function SyncMeili(table) { console.log("SYNC MEILI", table) // multi index is needed until sortStrategies or similar is added https://github.com/meilisearch/MeiliSearch/issues/730 const 
meiliBatchSize = 2000 switch (table){ case 'Imports:ToDo': const todoDocsWA = await redis.getJSON('meili:todo:wagoapp') || [] if (todoDocsWA.length) { let wagos = await WagoItem.find({_id: {$in: todoDocsWA}}) for (doc of wagos) { await meili.addDoc('weakauras', await doc.meiliWAData) } redis.setJSON('meili:todo:wagoapp', []) } break case 'Imports:Metrics': const lastIndexDate = await redis.get('meili:Metrics:Date') const wagoAppIndex = meili.index('weakauras') var metricsDocsWagoApp = [] if (!lastIndexDate) { redis.set('meili:Metrics:Date', new Date().toISOString()) } else { var cursor = WagoItem.aggregate([ {$match: {_meili: true}}, {$lookup: { from: 'wagofavorites', as: 'fave', let: {wagoID: '$_id'}, pipeline: [{ $match: { $expr: {$eq: ["$wagoID", "$$wagoID"]}, timestamp: {$gt: new Date(lastIndexDate)} }, }] }}, {$lookup: { from: 'viewsthisweeks', as: 'view', let: {wagoID: '$_id'}, pipeline: [{ $match: { $expr: {$eq: ["$wagoID", "$$wagoID"]}, viewed: {$gt: new Date(lastIndexDate)} }, }] }}, {$match: { $or: [ {"fave.0": {$exists: true}}, {"view.0": {$exists: true}} ] }} ]).cursor() for await (const doc of cursor) { let metrics = { id: doc._id, installs: doc.popularity.installed_count, stars: doc.popularity.favorite_count, views: doc.popularity.views, viewsThisWeek: doc.popularity.viewsThisWeek } // wago app if (doc._meiliWA) { metricsDocsWagoApp.push(metrics) } if (metricsDocsWagoApp.length >= meiliBatchSize) { await wagoAppIndex.updateDocuments(metricsDocsWagoApp) metricsDocsWagoApp = [] } } if (metricsDocsWagoApp.length) { await wagoAppIndex.updateDocuments(metricsDocsWagoApp) metricsDocsWagoApp = [] } redis.set('meili:Metrics:Date', new Date().toISOString()) } break case 'WagoApp': // complete DB sync var count = 0 var cursor = WagoItem.find({ type: {$regex: /WEAKAURA$/}, _userId: {$exists: true}, expires_at: null, $or: [{ hidden: false, restricted: false, private: false, encrypted: false, deleted: false, blocked: false }, { _meiliWA: true, $or: [ {hidden: 
true}, {restricted: true}, {private: true}, {encrypted: true}, {deleted: true}, {blocked: true} ] }] }).cursor() for (let doc = await cursor.next(); doc != null; doc = await cursor.next()) { count++ if (doc.hidden || doc.private || doc.encrypted || doc.restricted || doc.deleted || doc.blocked) { await meili.removeDoc('weakauras', await doc._id) doc._meiliWA = false await doc.save() } else { await meili.addDoc('weakauras', await doc.meiliWAData, true) if (!doc._meiliWA) { doc._meiliWA = true await doc.save() } } if (count%1000 == 0) { console.log('sync meili', count) } } break default: return } } const codeProcessVersion = ENUM.PROCESS_VERSION.WAGO async function CodeReview(customCode, doc) { try { let lc = await luacheck.run(customCode, doc.game) if (lc) { customCode = lc } } catch (e) { console.log('luacheck error', doc._id, e) } try { let metrics = await codeMetrics.run(customCode) if (metrics) { customCode = metrics } } catch (e) { console.log('codeMetrics error', doc._id, e) } return customCode } function TableReview (obj, data) { if (!data) { data = { dependencies: new Set() } } for (const [k, v] of Object.entries(obj)) { if (v && typeof v === 'object') { data = TableReview(v, data) } else if (typeof v === 'string' && (k === 'texture' || (k === 'sound' && (obj.do_sound || obj.sound_type === 'Play')))) { let s = v.replace(/\\{1,2}/g, '/') let m = s.match(/^Interface\/AddOns\/([^\/]+)\//i) if (m) { data.dependencies.add(m[1]) } } } return data } async function ProcessCode(data) { if (!data.id) return var doc = await WagoItem.lookup(data.id) var code = await WagoCode.lookup(data.id, data.version) if (!doc || !code || !code._id || doc.encrypted) { return } if (data.addon && Addons[data.addon]) { const addon = Addons[data.addon] if (addon && addon.addWagoData) { let meta = addon.addWagoData && addon.addWagoData(code, doc) if ((meta && meta.encode) || data.encode || !code.encoded) { code.encoded = await addon.encode(code.json.replace(/\\/g, '\\\\').replace(/"/g, 
'\\"').trim(), lua.runLua) } if (meta && meta.wago) { doc = meta.wago } } } else if (doc.type) { // match addon by type for (const addon of Object.values(Addons)) { if (doc.type.match(addon.typeMatch)) { let meta = addon.addWagoData && addon.addWagoData(code, doc) if ((meta && meta.encode) || data.encode || !code.encoded) { code.encoded = await addon.encode(code.json.replace(/\\/g, '\\\\').replace(/"/g, '\\"').trim(), lua.runLua) } if (meta && meta.wago) { doc = meta.wago } } } } let err try { switch (doc.type) { case 'SNIPPET': code.customCode = await CodeReview([{id: 'Lua', name: 'Snippet', lua: code.lua}], doc) break case 'WEAKAURA': case 'CLASSIC-WEAKAURA': case 'TBC-WEAKAURA': case 'PLATER': var json = JSON.parse(code.json) code.customCode = await CodeReview(getCode(json, doc.type), doc) const tableMetrics = TableReview(json) tableMetrics.dependencies = [...tableMetrics.dependencies] code.tableMetrics = tableMetrics break } } catch (e) { console.log(data, e) err = true } if (err) throw 'Code Processing Error' doc.blocked = false if (code.version > 1) { await WagoCode.updateMany({auraID: doc._id, _id: {$ne: code._id}}, {$set: {isLatestVersion: false}}) } code.isLatestVersion = true if (code.customCode && code.customCode.length) { doc.hasCustomCode = true code.customCode.forEach(c => { if (c.luacheck && c.luacheck.match(commonRegex.WeakAuraBlacklist)) { doc.blocked = true } }) } doc.codeProcessVersion = codeProcessVersion await doc.save() await code.save() if (doc._userId && !doc.deleted && !doc.expires_at) { elastic.addDoc('import', await doc.indexedImportData) } if (code.customCode.length) { return doc } return null } async function ProcessAllCode() { // return var cursor = WagoItem.find({ deleted: false, _userId: {$exists: true}, codeProcessVersion: {$lt: codeProcessVersion}, type: {$in: ['WEAKAURA', 'CLASSIC-WEAKAURA', 'TBC-WEAKAURA', 'PLATER']}, modified : { $gte: new Date(new Date().setDate(new Date().getDate()-180)) } }).cursor({batchSize:50}) let count = 
0 console.log('-------------- CODE SYNC START ----------------') for await (const doc of cursor) { count++ if (doc.deleted) { await elastic.removeDoc('import', doc._id) } else { await ProcessCode({id: doc._id, type: doc.type}) if (count%1000 == 0) { console.log('process code', count) } } } console.log('-------------- CODE SYNC FINISHED ----------------') } function sortJSON(obj) { // if a regular array then its already sorted but still sort any child objects if (Array.isArray(obj)) { for (let i = 0; i < obj.length; i++) { if (obj[i] && typeof obj[i] == 'object') { obj[i] = sortJSON(obj[i]) } } return obj } // sort object as expected var sorted = {} var keys keys = Object.keys(obj) keys.sort(function(key1, key2) { if(key1 < key2) return -1 if(key1 > key2) return 1 return 0 }) for (var i in keys) { var key = keys[i] if (obj[key] && typeof obj[key] == 'object') { sorted[key] = sortJSON(obj[key]) } else { sorted[key] = obj[key] } } return sorted } async function updateWAData (release, assets) { const addonDir = path.resolve(__dirname, '../lua', 'addons' ,'WeakAuras', release.version) await mkdirp(addonDir) const zipFile = path.resolve(addonDir, 'WeakAuras.zip') const writer = require('fs').createWriteStream(zipFile) var axiosDownload = {method: 'GET', responseType: 'stream'} for (let i = 0; i < assets.length; i++) { if (assets[i].name.match(/WeakAuras-[\d.]+\.zip/)) { axiosDownload.url = assets[i].browser_download_url break } } if (!axiosDownload.url) { logError(e, 'Unable to find WeakAura download') return false } const response = await axios(axiosDownload) response.data.pipe(writer) await new Promise((resolve, reject) => { writer.on('finish', resolve) writer.on('error', reject) }) await decompress(zipFile, addonDir) const waLua = await fs.readFile(addonDir + '/WeakAuras/WeakAuras.lua', 'utf8') const versionMatch = waLua.match(/internalVersion\s?=\s?(\d+)/) if (versionMatch && versionMatch[1]) { const internalVersion = parseInt(versionMatch[1]) if (internalVersion) { 
SiteData.set('weakAuraInternalVersion', internalVersion) return } } // if we get here then internalVersion is not found or is not an integer logError(e, 'Unable to find WeakAura internalVersion') } async function updateMDTData (release, assets) { if (!assets.zipball_url) { logError('Unable to find MDT download', assets) return false } const addonDir = path.resolve(__dirname, '../lua', 'addons' ,'MDT', release.version) await mkdirp(addonDir) const zipFile = path.resolve(addonDir, 'MDT.zip') const writer = require('fs').createWriteStream(zipFile) var axiosDownload = {method: 'GET', responseType: 'stream', url: assets.zipball_url} const response = await axios(axiosDownload) response.data.pipe(writer) await new Promise((resolve, reject) => { writer.on('finish', resolve) writer.on('error', reject) }) await decompress(zipFile, addonDir) // get commit directory const commit = await fs.readdir(addonDir) var mdtData = await lua.BuildMDT_DungeonTable(`${addonDir}/${commit[1]}`) mdtData = JSON.parse(mdtData) // calculate dimensions mdtData.dungeonDimensions = [] mdtData.dungeonEnemies.forEach((enemies, mapID) => { // console.log(mapID) mdtData.dungeonDimensions.push({maxX: -9999999, minX: 9999999, maxY: -9999999, minY: 9999999}) if (!enemies) return enemies.forEach((creature) => { if (!creature || !creature.clones) return creature.clones.forEach((clone) => { if (!clone) { return } mdtData.dungeonDimensions[mapID].maxX = Math.max(mdtData.dungeonDimensions[mapID].maxX, clone.x) mdtData.dungeonDimensions[mapID].minX = Math.min(mdtData.dungeonDimensions[mapID].minX, clone.x) mdtData.dungeonDimensions[mapID].maxY = Math.max(mdtData.dungeonDimensions[mapID].maxY, clone.y) mdtData.dungeonDimensions[mapID].minY = Math.min(mdtData.dungeonDimensions[mapID].minY, clone.y) }) }) }) // save core data plus for each dungeon await SiteData.findByIdAndUpdate('mdtDungeonTable', {value: mdtData}, {upsert: true}).exec() await SiteData.findByIdAndUpdate('mdtAffixWeeks', {value: 
mdtData.affixWeeks}, {upsert: true}).exec() await cloudflare.zones.purgeCache(config.cloudflare.zoneID, {files: ['https://data.wago.io/data/mdtDungeonTable', 'https://data.wago.io/data/mdtAffixWeeks']}) for (let mapID = 0; mapID < mdtData.dungeonEnemies.length; mapID++) { let Obj = { affixWeeks: mdtData.affixWeeks, dungeonEnemies: mdtData.dungeonEnemies[mapID], enemyHash: md5(JSON.stringify(mdtData.dungeonEnemies[mapID])), mapPOIs: mdtData.mapPOIs[mapID], mapInfo: mdtData.mapInfo[mapID], dungeonTotalCount: mdtData.dungeonTotalCount[mapID], scaleMultiplier: mdtData.scaleMultiplier[mapID], dungeonSubLevels: mdtData.dungeonSubLevels[mapID], dungeonMaps: mdtData.dungeonMaps[mapID], dungeonDimensions: mdtData.dungeonDimensions[mapID] } if (mapID === 15) { Obj.freeholdCrews = mdtData.freeholdCrews } const currentHash = await SiteData.findById('mdtDungeonTable-' + mapID).exec() await SiteData.findByIdAndUpdate('mdtDungeonTable-' + mapID, {value: Obj}, {upsert: true}).exec() await cloudflare.zones.purgeCache(config.cloudflare.zoneID, {files: ['https://data.wago.io/data/mdtDungeonTable-' + mapID]}) // currentHash.value.enemyHash = null // force regenerate // if new portrait maps are required if ((!currentHash || currentHash.value.enemyHash !== Obj.enemyHash) && Obj.dungeonMaps && Obj.dungeonEnemies && Obj.dungeonEnemies.length) { try { console.log('make portrait map', mapID) for (let subMapID = 1; subMapID <= Object.keys(Obj.dungeonMaps).length; subMapID++) { if (mapID === 18) { await buildStaticMDTPortraits(Obj, mapID, subMapID, false, 1) await buildStaticMDTPortraits(Obj, mapID, subMapID, false, 2) await buildStaticMDTPortraits(Obj, mapID, subMapID, true, 1) await buildStaticMDTPortraits(Obj, mapID, subMapID, true, 2) } else { await buildStaticMDTPortraits(Obj, mapID, subMapID, false) await buildStaticMDTPortraits(Obj, mapID, subMapID, true) } break } logger({e_c: 'Generate MDT portrait maps', e_a: Obj.dungeonMaps['0'], e_n: Obj.dungeonMaps['0']}) } catch (e) { 
logError(e, 'Generating MDT portrait maps ' + Obj.dungeonMaps['0']) } } } return } async function buildStaticMDTPortraits(json, mapID, subMapID, teeming, faction) { // this is very finicky so only run it locally to generate the images if (config.env !== 'development') { return } const puppeteer = require('puppeteer-firefox') const mdtScale = 539 / 450 if (teeming) teeming = '-Teeming' else teeming = '' var imgName = `portraitMap-${mapID}-${subMapID}${teeming}` if (faction) { imgName = imgName + '-Faction' + faction } console.log('make map for', json.dungeonMaps["0"], imgName) var html = `<!DOCTYPE html> <html> <head> <script src="https://unpkg.com/[email protected]/konva.min.js"></script> <meta charset="utf-8"> <title>Konva Circle Demo</title> <style> body { margin: 0; padding: 0; overflow: hidden; background-color: #F0F0F0; } </style> </head> <body> <div id="container"></div> <script> var multiplier = 5 var stage = new Konva.Stage({ container: 'container', width: 1024 * multiplier, height: 768 * multiplier }); var layer = new Konva.Layer(); var enemyPortraits = new Image() enemyPortraits.src = 'https://wago.io/mdt/portraits-${mapID}.png?' enemyPortraits.crossOrigin = 'Anonymous' enemyPortraits.onload = () => { console.log(enemyPortraits.src, 'loaded') ` json.dungeonEnemies.forEach((creature, i) => { if (!creature || !creature.clones) return creature.clones.forEach((clone, j) => { if (clone && (!clone.sublevel || clone.sublevel === subMapID) && (!clone.teeming || (clone.teeming && teeming)) && (!clone.faction || (clone.faction === faction))) { html = html + ` var circle${i}_${j} = new Konva.Circle({ x: ${clone.x * mdtScale} * multiplier, y: ${clone.y * -mdtScale} * multiplier, radius: ${Math.round(5 * creature.scale * (creature.isBoss ? 1.7 : 1) * (json.scaleMultiplier || 1)) / mdtScale} * multiplier, fillPatternX: ${(-Math.round(5 * creature.scale * (creature.isBoss ? 
1.7 : 1))) / mdtScale} * multiplier, fillPatternY: ${(-Math.round(5 * creature.scale * (creature.isBoss ? 1.7 : 1))) / mdtScale} * multiplier, fillPatternImage: enemyPortraits, fillPatternOffset: ${getEnemyPortraitOffset(json.dungeonEnemies.length, i, 115)}, fillPatternRepeat: 'no-repeat', fillPatternScaleX: ${Math.round(5 * creature.scale * (creature.isBoss ? 1.7 : 1)) / 64} * multiplier, fillPatternScaleY: ${Math.round(5 * creature.scale * (creature.isBoss ? 1.7 : 1)) / 64} * multiplier, stroke: '${creature.isBoss ? 'gold' : 'black'}', strokeWidth: .5 * multiplier }) // add the shape to the layer layer.add(circle${i}_${j});` } }) return }) html = html + ` stage.add(layer); setTimeout(() => { var img = document.createElement('img') img.src = stage.toDataURL() img.id = 'img' document.body.appendChild(img) document.getElementById('container').remove() }, 1000) } </script> </body> </html>` // await fs.writeFile('../test.html', html, 'utf8') console.log('launch puppeteer') const browser = await puppeteer.launch({args: [ '--disable-web-security', // ignore cors errors ]}) const page = await browser.newPage() await page.setCacheEnabled(false) await page.setContent(html) page.on('console', msg => { for (let i = 0; i < msg.args().length; ++i) console.log(`${i}: ${msg.args()[i]}`); }); await page.waitForSelector('img', {timeout: 120000}) const base64 = await page.evaluate(() => { return document.getElementById('img').src }) await browser.close() const buffer = Buffer.from(base64.replace(/^data:image\/\w+;base64,/, ""), 'base64') await image.saveMdtPortraitMap(buffer, imgName) return } function getEnemyPortraitOffset (numCreatures, creatureIndex, size) { var row = 0 size = size || 36 if (creatureIndex >= Math.ceil(numCreatures / 2)) { row++ } var o = {x: ((creatureIndex) - (Math.ceil(numCreatures / 2) * row)) * size, y: row * size} return `{x: ${o.x}, y: ${o.y}}` } /* UpdateTwitchSubs: async (req) => { const users = await User.find({"roles.pro_subscriber": true, 
"twitch.refreshToken": {$exists: true}}).exec() users.forEach(async (user) => { try { var refresh = await axios.post('https://id.twitch.tv/oauth2/token', querystring.stringify({ client_id: config.auth.twitch.clientID, client_secret: config.auth.twitch.clientSecret, grant_type: 'refresh_token', refresh_token: user.twitch.refreshToken }), { headers: { "Content-Type": "application/x-www-form-urlencoded" } }) var validate = await axios.get('https://id.twitch.tv/oauth2/validate', {headers: {Authorization: 'OAuth ' + refresh.data.access_token }}) if (validate.data.user_id === user.twitch.id) { user.twitch.refreshToken = refresh.data.refresh_token await user.save() var subs = await axios.get('https://api.twitch.tv/helix/subscriptions', {params: {broadcaster_id: user.twitch.id}, headers: {Authorization: 'Bearer ' + refresh.data.access_token }}) if (subs.data.data) { var currentSubs = [] subs.data.data.forEach(async (subscriber) => { var subUser = await User.findOne({"twitch.id": subscriber.user_id}).exec() if (subUser) { currentSubs.push(subUser._id) if (subUser.twitch.subscribedTo.indexOf(user.twitch.id) === -1) { subUser.twitch.subscribedTo.push(user.twitch.id) await subUser.save() } } }) // remove old subs if (currentSubs.length) { var exSubs = await User.find({"twitch.subscribedTo": user.twitch.id, _id: {$nin: currentSubs}}).exec() exSubs.forEach(async (exSubscriber) => { var i = exSubscriber.twitch.subscribedTo.indexOf(user.twitch.id) exSubscriber.twitch.subscribedTo.splice(i, 1) await exSubscriber.save() }) } } } else { // no longer allowing link between wago and twitch user.twitch.refreshToken = null await user.save() } } catch (e) { if (e.response && e.response.status === 401 && user) { user.twitch.refreshToken = null await user.save() } req.trackError(e, 'Cron: UpdateTwitchSubs') } }) } } */ /* // TODO stats page function generateStats(res) { const startDate = new Date(1463788800000) // May 21 2016 async.series({ WeakAuras: (done) => { Stats.findOne({name: 'Total 
WeakAuras'}).sort({date: -1}).then((stat) => { let date let today = new Date() if (!stat) { date = startDate } else if (stat.date.nextWeek() < today) { date = stat.date.nextWeek() } else { // up to date already return done() } while (date < today) { let dDate = new Date(date) WagoItem.count({type: "WEAKAURAS2", created: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((num) => { Stats.findOneAndUpdate({name: 'Total WeakAuras', date: dDate}, {name: 'Total WeakAuras', date: dDate, value: num}, {upsert: true}).exec() }) date = date.nextWeek() } done() }) }, ElvUI: (done) => { Stats.findOne({name: 'Total ElvUI'}).sort({date: -1}).then((stat) => { let date let today = new Date() if (!stat) { date = startDate } else if (stat.date.nextWeek() < today) { date = stat.date.nextWeek() } else { // up to date already return done() } while (date < today) { let dDate = new Date(date) WagoItem.count({type: "ELVUI", created: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((num) => { Stats.findOneAndUpdate({name: 'Total ElvUI', date: dDate}, {name: 'Total ElvUI', date: dDate, value: num}, {upsert: true}).exec() }) date = date.nextWeek() } done() }) }, MDT: (done) => { Stats.findOne({name: 'Total MDT Routes'}).sort({date: -1}).then((stat) => { let date let today = new Date() if (!stat) { date = startDate } else if (stat.date.nextWeek() < today) { date = stat.date.nextWeek() } else { // up to date already return done() } while (date < today) { let dDate = new Date(date) WagoItem.count({type: "MDT", created: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((num) => { Stats.findOneAndUpdate({name: 'Total MDT Routes', date: dDate}, {name: 'Total MDT Routes', date: dDate, value: num}, {upsert: true}).exec() }) date = date.nextWeek() } done() }) }, TotalRP: (done) => { Stats.findOne({name: 'Total TotalRP'}).sort({date: -1}).then((stat) => { let date let today = new Date() if (!stat) { date = startDate } else if (stat.date.nextWeek() < today) { date = stat.date.nextWeek() } else { // up to 
date already return done() } while (date < today) { let dDate = new Date(date) WagoItem.count({type: "TOTALRP3", created: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((num) => { Stats.findOneAndUpdate({name: 'Total TotalRP', date: dDate}, {name: 'Total TotalRP', date: dDate, value: num}, {upsert: true}).exec() }) date = date.nextWeek() } done() }) }, VuhDo: (done) => { Stats.findOne({name: 'Total VuhDo'}).sort({date: -1}).then((stat) => { let date let today = new Date() if (!stat) { date = startDate } else if (stat.date.nextWeek() < today) { date = stat.date.nextWeek() } else { // up to date already return done() } while (date < today) { let dDate = new Date(date) WagoItem.count({type: "VUHDO", created: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((num) => { Stats.findOneAndUpdate({name: 'Total VuhDo', date: dDate}, {name: 'Total VuhDo', date: dDate, value: num}, {upsert: true}).exec() }) date = date.nextWeek() } done() }) }, Companion: (done) => { let date = startDate let today = new Date while (date < today) { let dDate = new Date(date) WagoFavorites.distinct('appID', {type: 'Install', timestamp: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((IDs) => { var num = IDs.length Stats.findOneAndUpdate({name: 'WeakAura Companion Installs', date: dDate}, {name: 'WeakAura Companion Installs', date: dDate, value: num}, {upsert: true}).exec() }) date = date.nextWeek() } done() }, WACode: (done) => { Stats.findOne({name: 'WeakAura Region group'}).sort({date: -1}).then((stat) => { let date let today = new Date() if (!stat) { date = startDate } else if (stat.date.nextWeek() < today) { date = stat.date.nextWeek() } else { // up to date already return done() } async.whilst( () => { return date < today }, (cb) => { let dDate = new Date(date) let countAuthorOptions = 0 let countBuffTrigger2 = 0 let countTriggers = 0 let countTriggerCustomCode = 0 let countTriggerCustomCodeEveryFrame = 0 let countRegionTypes = {group:0, dynamicgroup:0, aurabar:0, icon:0, text:0, model:0, 
model:0, texture:0, progresstexture:0, stopmotion:0} WagoCode.find({updated: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((wa) => { async.forEach(wa, (code, next) => { // confirm import is a weakaura WagoItem.findOne({_id: code.auraID, type: 'WEAKAURA'}).then((aura) => { if (aura) { var json = JSON.parse(code.json) if (!json.d) { return next() } if (!json.c) { json.c = [json.d] } else { countRegionTypes[json.d.regionType]++ } for (let i = 0; i < json.c.length; i++) { if (!json.c[i]) continue countRegionTypes[json.c[i].regionType]++ // author options feature if (json.c[i].authorOptions && json.c[i].authorOptions.length) { countAuthorOptions++ } // bufftrigger2 feature if (json.c[i].triggers && json.c[i].triggers['1']) { countBuffTrigger2++ } // count triggers with custom code if (json.c[i].trigger) { countTriggers++ if (json.c[i].trigger.type === 'custom' && json.c[i].trigger.custom) { countTriggerCustomCode++ if (json.c[i].trigger.check === 'update') { countTriggerCustomCodeEveryFrame++ } } } if (json.c[i].additional_triggers && json.c[i].additional_triggers.length) { for (let k = 0; k < json.c[i].additional_triggers.length; k++) { countTriggers++ if (json.c[i].additional_triggers[k].type === 'custom' && json.c[i].additional_triggers[k].custom) { countTriggerCustomCode++ if (json.c[i].trigger.check === 'update') { countTriggerCustomCodeEveryFrame++ } } } } if (json.c[i].triggers) { for (var k in json.c[i].triggers) { if (parseInt(k) && json.c[i].triggers[k].trigger) { countTriggers++ if (json.c[i].triggers[k].trigger.type === 'custom' && json.c[i].triggers[k].trigger.custom) { countTriggerCustomCode++ if (json.c[i].triggers[k].trigger.check === 'update') { countTriggerCustomCodeEveryFrame++ } } } } } } } next() }) }, () => { Stats.findOneAndUpdate({name: 'WeakAura Imports with Author Options Feature', date: dDate}, {name: 'WeakAura Imports with Author Options Feature', date: dDate, value: countAuthorOptions}, {upsert: true}).exec() Stats.findOneAndUpdate({name: 
'WeakAura Imports with BuffTrigger2 Feature', date: dDate}, {name: 'WeakAura Imports with BuffTrigger2 Feature', date: dDate, value: countBuffTrigger2}, {upsert: true}).exec() // Stats.findOneAndUpdate({name: 'WeakAura Triggers', date: dDate}, {name: 'WeakAura Triggers', date: dDate, value: countTriggers}, {upsert: true}).exec() Stats.findOneAndUpdate({name: 'WeakAura Triggers with Custom Code', date: dDate}, {name: 'WeakAura Triggers with Custom Code', date: dDate, value: countTriggerCustomCode}, {upsert: true}).exec() Stats.findOneAndUpdate({name: 'WeakAura Triggers with Custom Code Updating Every Frame', date: dDate}, {name: 'WeakAura Triggers with Custom Code Updating Every Frame', date: dDate, value: countTriggerCustomCodeEveryFrame}, {upsert: true}).exec() Object.keys(countRegionTypes).forEach((region) => { if (region && region !== 'undefined') { console.log(region, countRegionTypes[region]) Stats.findOneAndUpdate({name: 'WeakAura Region ' + region, date: dDate}, {name: 'WeakAura Region ' + region, date: dDate, value: countRegionTypes[region]}, {upsert: true}).exec() } }) date = date.nextWeek() cb() }) }) }, () => { done() }) }) } }, () => { res.send({done: true}) }) } Date.prototype.nextWeek = function() { var date = new Date(this.valueOf()) date.setDate(date.getDate() + 7) return date } */
backend/api/helpers/tasks.js
const battlenet = require('./battlenet') const cloudflare = require('cloudflare')({token: config.cloudflare.dnsToken}) const decompress = require('@atomic-reactor/decompress') const image = require('./image') const lua = require('./lua') const md5 = require('md5') const mkdirp = require('mkdirp') const path = require('path') const updateDataCaches = require('../../middlewares/updateLocalCache') const getCode = require('./code-detection/get-code') const luacheck = require('./luacheck') const codeMetrics = require('./codeMetrics') const ENUM = require('../../middlewares/enum') const logger = require('../../middlewares/matomo') const logError = require('../../middlewares/matomoErrors') module.exports = async (task, data) => { try { switch (task) { case 'UpdatePatreonAccounts': return await UpdatePatreonAccounts() case 'UpdateWeeklyMDT': return await UpdateWeeklyMDT() case 'ComputeStatistics': return await ComputeStatistics() case 'DiscordMessage': return await DiscordMessage(data) case 'UpdateValidCharacters': return await UpdateValidCharacters() case 'UpdateGuildMembership': return await UpdateGuildMembership() case 'UpdateLatestAddonReleases': return await UpdateLatestAddonReleases() case 'UpdateTopLists': return await UpdateTopLists() case 'UpdateTwitchStatus': return await UpdateTwitchStatus(data) case 'UpdateWagoOfTheMoment': return await UpdateWagoOfTheMoment() case 'UpdateActiveUserCount': return await UpdateActiveUserCount() case 'UpdateLatestNews': return await UpdateLatestNews() case 'SyncElastic': return await SyncElastic(data.table) case 'SyncMeili': return await SyncMeili(data.table) case 'ProcessCode': return await ProcessCode(data) case 'ProcessAllCode': return await ProcessAllCode() case 'CleanTaskQueue': return taskQueue.clean(10000) default: throw {name: 'Unknown task', message: 'Unknown task ' + task} } } catch (e) { console.log(e) logError(e, 'Task ', task) } } async function UpdateWagoOfTheMoment () { const data = await 
WagoItem.randomOfTheMoment() await SiteData.findOneAndUpdate({_id: 'WagoOfTheMoment'}, {value: data}, {upsert: true}).exec() await updateDataCaches.queue('WagoOfTheMoment') } async function UpdateTwitchStatus (channel) { var twitchToken = await redis.get('twitch:appToken') if (!twitchToken) { const getToken = await axios.post(`https://id.twitch.tv/oauth2/token?client_id=${config.auth.twitch.clientID}&client_secret=${config.auth.twitch.clientSecret}&grant_type=client_credentials`) if (getToken && getToken.data && getToken.data.access_token) { twitchToken = getToken.data.access_token redis.set('twitch:appToken', twitchToken, 'EX', getToken.data.expires_in) } } var streams = [] var status = {} if (!channel || typeof channel !== 'string') { const cfg = await SiteData.get('EmbeddedStream') streams = cfg.streams } for (let i = 0; i < streams.length; i++) { let channel = streams[i].channel const req = await axios.get(`https://api.twitch.tv/helix/streams?user_login=${channel}`, { headers: { 'client-id': config.auth.twitch.clientID, 'Authorization': 'Bearer '+ twitchToken } }) await redis.set(`twitch:${channel}:live`, (req.data.data.length > 0)) status[channel] = (req.data.data.length > 0) } const streamers = await Streamer.find({}) var getStreams = [] for (let i = 0; i < streamers.length; i++) { getStreams.push(`user_login=${streamers[i].name}&`) } var twitchStreamers = [] while (getStreams.length) { let twitchUserQuery = getStreams.splice(0, 20) let twitchReq = await axios.get(`https://api.twitch.tv/helix/streams?${twitchUserQuery.join('')}`, { headers: { 'client-id': config.auth.twitch.clientID, 'Authorization': 'Bearer '+ twitchToken } }) if (twitchReq && twitchReq.data && twitchReq.data.data) { twitchStreamers = twitchStreamers.concat(twitchReq.data.data) } } for (let i = 0; i < streamers.length; i++) { for (let k = 0; k < twitchStreamers.length; k++) { if (twitchStreamers[k].user_name.toLowerCase() === streamers[i].name.toLowerCase()) { streamers[i].online = new 
Date(twitchStreamers[k].started_at) streamers[i].offline = null streamers[i].game = twitchStreamers[k].game_name streamers[i].title = twitchStreamers[k].title streamers[i].viewers = twitchStreamers[k].viewer_count - (streamers[i].wagoViewers || 0) streamers[i].name = twitchStreamers[k].user_name await streamers[i].save() streamers[i].ok = true await redis.set(`twitch:${streamers[i].name}:live`, 1) } } } for (let i = 0; i < streamers.length; i++) { if (!streamers[i].ok && streamers[i].online) { streamers[i].online = null streamers[i].offline = Date.now() streamers[i].viewers = 0 streamers[i].wagoViewers = 0 await streamers[i].save() await redis.del(`twitch:${streamers[i].name}:live`) await redis2.zremrangebyscore(`allEmbeds:${streamers[i].name}`, '-inf', '+inf') } } return status } async function UpdateLatestNews () { const docs = await Blog.find({publishStatus: 'publish'}).sort('-date').limit(1).populate('_userId') var news = [] docs.forEach((item) => { news.push({ content: item.content, date: item.date, format: item.format, title: item.title, _id: item._id, user: { username: item._userId.account.username, css: item._userId.roleclass } }) }) await SiteData.findOneAndUpdate({_id: 'LatestNews'}, {value: news}, {upsert: true}).exec() await updateDataCaches.queue('LatestNews') } async function UpdatePatreonAccounts () { nextURL = 'https://www.patreon.com/api/oauth2/v2/campaigns/562591/members?include=currently_entitled_tiers,user&fields%5Btier%5D=title' while (nextURL) { var response = await axios.get(nextURL, {headers: {Authorization: 'Bearer '+ config.auth.patreon.creatorToken}}) var patrons = response.data.data for (let i = 0; i < patrons.length; i++) { if (!patrons[i] || !patrons[i].relationships || !patrons[i].relationships.user || !patrons[i].relationships.user.data || !patrons[i].relationships.user.data.id) { continue } var user = await User.findOne({"patreon.id": patrons[i].relationships.user.data.id}) if (!user) { continue } var tier try { tier = 
patrons[i].relationships.currently_entitled_tiers.data[0].id } catch (e) { tier = 0 } // subscriber 1385924 // gold sub 1386010 user.roles.subscriber = tier > 0 user.roles.gold_subscriber = tier > 1385924 // user.roles.guild_subscriber = (!patrons[i].attributes.declined_since && patrons[i].attributes.amount_cents >= 1500) await user.save() } if (response.data.links && response.data.links.next) { nextURL = response.data.links.next } else { nextURL = null } } return } async function UpdateWeeklyMDT () { await battlenet.updateMDTWeeks() } async function UpdateTopLists () { var data = [] // favorites var imports = await WagoItem.find({deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorites All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite WeakAuras All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'CLASSIC-WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite Classic WeakAuras All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'TBC-WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: 
false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite TBC WeakAuras All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'PLATER', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite Plater All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'TOTALRP3', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite Total RP All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'VUHDO', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite VuhDo All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'ELVUI', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.favorite_count").select('_id name popularity.favorite_count').limit(15).exec() data.push({title: 'Favorite ElvUI All Time', imports: imports.map(x => { return {count: x.popularity.favorite_count, display: '[-count-] star', name: x.name, slug: x.slug} }), lastOfSection: true }) // popular imports = await WagoItem.find({deleted: 
false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular WeakAuras This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'CLASSIC-WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular Classic WeakAuras This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'TBC-WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular TBC WeakAuras This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'PLATER', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular Plater This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) 
}) imports = await WagoItem.find({type: 'TOTALRP3', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular Total RP This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'VUHDO', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular VuhDo This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'ELVUI', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.viewsThisWeek").select('_id name popularity.viewsThisWeek').limit(15).exec() data.push({title: 'Popular ElvUI This Week', imports: imports.map(x => { return {count: x.popularity.viewsThisWeek, display: '[-count-] view', name: x.name, slug: x.slug} }), lastOfSection: true }) // installed imports = await WagoItem.find({deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.installed_count").select('_id name popularity.installed_count').limit(15).exec() data.push({title: 'Installed', imports: imports.map(x => { return {count: x.popularity.installed_count, display: '[-count-] install', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.installed_count").select('_id name popularity.installed_count').limit(15).exec() data.push({title: 'Installed WeakAuras', imports: imports.map(x => { return {count: x.popularity.installed_count, display: 
'[-count-] install', name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({type: 'CLASSIC-WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.installed_count").select('_id name popularity.installed_count').limit(15).exec() data.push({title: 'Installed Classic WeakAuras', imports: imports.map(x => { return {count: x.popularity.installed_count, display: '[-count-] install', name: x.name, slug: x.slug} }), lastOfSection: true }) imports = await WagoItem.find({type: 'TBC-WEAKAURA', deleted: false, hidden: false, private: false, restricted: false, encrypted: false}).sort("-popularity.installed_count").select('_id name popularity.installed_count').limit(15).exec() data.push({title: 'Installed TBC WeakAuras', imports: imports.map(x => { return {count: x.popularity.installed_count, display: '[-count-] install', name: x.name, slug: x.slug} }), lastOfSection: true }) // new and updated imports imports = await WagoItem.find({deleted: false, hidden: false, private: false, restricted: false, encrypted: false, $where: "this.created.getTime() != this.modified.getTime()"}).sort({"modified": -1}).select('_id name modified').limit(15).exec() data.push({title: 'Recently Updated', imports: imports.map(x => { return {date: true, display: x.modified, name: x.name, slug: x.slug} }) }) imports = await WagoItem.find({deleted: false, hidden: false, private: false, restricted: false, encrypted: false, $where: "this.created.getTime() == this.modified.getTime()"}).sort({"created": -1}).select('_id name created').limit(15).exec() data.push({title: 'Newest Imports', imports: imports.map(x => { return {date: true, display: x.created, name: x.name, slug: x.slug} }) }) // save data await SiteData.findOneAndUpdate({_id: 'TopLists'}, {value: data}, {upsert: true}).exec() await updateDataCaches.queue('TopLists') } async function DiscordMessage (data) { if (global.discordBot) { const author = await User.findById(data.author) const 
wago = await WagoItem.lookup(data.wago) if (data.type === 'comment') { const sendTo = await User.findOne({_id: data.to, "discord.options.messageOnComment": true}).select('discord').exec() if (sendTo && !author._id.equals(sendTo._id)) { discordBot.postComment(author, sendTo, wago, data.message) } } else if (data.type === 'update') { const stars = await WagoFavorites.find({type: 'Star', wagoID: wago._id}) for (let i = 0; i < stars.length; i++) { const sendTo = await User.findOne({_id: stars[i].userID, "discord.options.messageOnFaveUpdate": true}).select('discord').exec() if (sendTo && !author._id.equals(sendTo._id)) { discordBot.postUpdate(author, sendTo, wago) } } } } } async function UpdateValidCharacters () { const fourWeeksAgo = new Date() fourWeeksAgo.setDate(fourWeeksAgo.getDate() - 28) const users = await User.find({"battlenet.characters.1": {$exists: true}, $or: [{"battlenet.updateDate": {$exists: false}}, {"battlenet.updateDate": {$lt: fourWeeksAgo}}]}).limit(50).exec() for (let i = 0; i < users.length; i++) { var validChars = [] for (let k = 0; k < users[i].battlenet.characters.length; k++) { const status = await battlenet.lookupCharacterStatus(users[i].battlenet.characters[k].region, users[i].battlenet.characters[k].realm, users[i].battlenet.characters[k].name) if (status.error || !status.is_valid || (users[i].battlenet.characters[k].bnetID && users[i].battlenet.characters[k].bnetID != status.id)) { continue } else if (!users[i].battlenet.characters[k].bnetID) { users[i].battlenet.characters[k].bnetID = status.id } validChars.push(users[i].battlenet.characters[k]) } users[i].battlenet.updateDate = new Date() users[i].battlenet.characters = validChars await users[i].save() } } async function UpdateGuildMembership () { function guildRankSort(a, b) { if (a.rank > b.rank) return -1 else if (a.rank < b.rank) return 1 return 0 } function escapeRegExp(string) { return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string } var 
guildsChecked = [] const users = await User.find({"battlenet.guilds.1": {$exists: true}, $or: [{"roles.gold_subscriber": true}, {"roles.pro_subscriber": true}, {"roles.ambassador": true}, {"roles.developer": true}, {"roles.community_leader": true}, {"roles.artContestWinnerAug2018": true}]}).exec() const updateGuild = async function (guildKey) { const accountIdsInGuild = [] const accountsInGuild = [] if (guildKey.match(/@\d$/, '') || guildsChecked.indexOf(guildKey) >= 0) { return Promise.resolve() } guildsChecked.push(guildKey) const [region, realm, guildname] = guildKey.split(/@/g) const guild = await battlenet.lookupGuild(region, realm, guildname) if (!guild || !guild.members) { // if unknown error (likely 500) return Promise.resolve() } else if (guild.error === 'NOGUILD') { // if this guild no longer exists, remove all members from it let exGuild = await User.find({"battlenet.guilds": guildKey}).exec() let deletePromise = new Promise(async (deleteDone) => { exGuild.forEach(async (exMember) => { let re = new RegExp('^' + guildKey + '(@\\d)?$') for (let g = exMember.battlenet.guilds.length - 1; g >= 0; g--) { if (exMember.battlenet.guilds[g].match(re)) { exMember.battlenet.guilds.splice(g, 1) } } await exMember.save() return deleteDone() }) }) await deletePromise return Promise.resolve() } else { // guild found! 
Match all wago users with guild guild.members.sort(guildRankSort) for (let j = 0; j < guild.members.length; j++) { let memberUser = await User.findOne({"battlenet.characters.region": region, "battlenet.characters.name": guild.members[j].character.name}) if (!memberUser) { continue } var memberID = memberUser._id.toString() if (accountsInGuild.indexOf(memberID) === -1) { accountIdsInGuild.push(memberUser._id) accountsInGuild.push(memberID) } // if new member to guild if (memberUser.battlenet.guilds.indexOf(guildKey) === -1) { memberUser.battlenet.guilds.push(guildKey) for (let k = guild.members[j].rank; k <= 9; k++) { memberUser.battlenet.guilds.push(`${guildKey}@${k}`) } } // else they are already in guild, but since they may have changed ranks // remove everything and re-add all current ranks else { let re = new RegExp('^' + escapeRegExp(guildKey) + '@\\d$') for (let g = 0; g < memberUser.battlenet.guilds.length; g++) { if (memberUser.battlenet.guilds[g].match(re)) { memberUser.battlenet.guilds.splice(g, 1) break } } for (let k = guild.members[j].rank; k <= 9; k++) { memberUser.battlenet.guilds.push(`${guildKey}@${k}`) } } if (guildKey === 'eu@twisting-nether@Method') { memberUser.roles.methodRaider = (guild.members[j].rank <= 4) } memberUser.battlenet.guilds = [...new Set(memberUser.battlenet.guilds)] await memberUser.save() } // remove old members let exGuild = await User.find({"battlenet.guilds": guildKey, _id: {$nin: accountIdsInGuild}}).exec() for (let d = 0; d < exGuild.length; d++) { let re = new RegExp('^' + guildKey + '(@\\d)?$') for (let g = exGuild[d].battlenet.guilds.length - 1; g >= 0; g--) { if (exGuild[d].battlenet.guilds[g].match(re)) { exGuild[d].battlenet.guilds.splice(g, 1) } } if (guildKey === 'eu@twisting-nether@Method') { exGuild[d].roles.methodRaider = false } await exGuild[d].save() } } } for (let i = 0; i < users.length; i++) { for (let j = 0; j < users[i].battlenet.guilds.length; j++) { await updateGuild(users[i].battlenet.guilds[j]) } } 
await updateGuild('eu@twisting-nether@Method') } async function ComputeStatistics () { // calc views this week const viewedDocs = await ViewsThisWeek.aggregate([{$group: { _id: '$wagoID', views: { $sum: 1 }}}]).exec() let totalImports = 0 let totalSum = 0 let totalSquared = 0 while (viewedDocs.length > 0) { // process in batches of 500 let items = viewedDocs.splice(0, 500) let ops = [] items.forEach((wago) => { ops.push({ updateOne: { filter: {_id: wago._id}, update: {'popularity.viewsThisWeek': wago.views} } }) if (wago.views > 5) { totalImports++ totalSum += wago.views totalSquared += wago.views * wago.views } }) await WagoItem.bulkWrite(ops) } let mean = totalSum / totalImports let standardDeviation = Math.sqrt((totalSquared - ((totalSum * totalSum) / totalImports)) / (totalSum - 1)) await redis.set('stats:standardDeviation:views', standardDeviation || 0) await redis.set('stats:mean:views', mean || 0) const recentDate = new Date() recentDate.setMonth(recentDate.getDate() - 18) // calc installs this month totalImports = 0 totalSum = 0 totalSquared = 0 const installDocs = await WagoFavorites.aggregate([ {$match: {type: 'Install', timestamp: {$gt: recentDate}}}, {$group: { _id: '$wagoID', installs: { $sum: 1 }}} ]).exec() while (installDocs.length > 0) { // process in batches of 500 var items = installDocs.splice(0, 500) items.forEach((wago) => { if (wago.installs > 5) { totalImports++ totalSum += wago.installs totalSquared += wago.installs * wago.installs } }) } mean = totalSum / totalImports standardDeviation = Math.sqrt((totalSquared - ((totalSum * totalSum) / totalImports)) / (totalSum - 1)) await redis.set('stats:standardDeviation:installs', standardDeviation || 0) await redis.set('stats:mean:installs', mean || 0) // calc stars this month totalImports = 0 totalSum = 0 totalSquared = 0 const starDocs = await WagoFavorites.aggregate([ {$match: {type: 'Star', timestamp: {$gt: recentDate}}}, {$group: { _id: '$wagoID', stars: { $sum: 1 }}} ]).exec() while 
(starDocs.length > 0) { // process in batches of 500 var items = starDocs.splice(0, 500) items.forEach((wago) => { if (wago.stars > 5) { totalImports++ totalSum += wago.stars totalSquared += wago.stars * wago.stars } }) } mean = totalSum / totalImports standardDeviation = Math.sqrt((totalSquared - ((totalSum * totalSum) / totalImports)) / (totalSum - 1)) await redis.set('stats:standardDeviation:stars', standardDeviation || 0) await redis.set('stats:mean:stars', mean || 0) } async function UpdateLatestAddonReleases () { const addons = [ {name: 'WeakAuras-2', host: 'github', url: 'https://api.github.com/repos/weakAuras/WeakAuras2/releases'}, {name: 'VuhDo', host: 'gitlab', url: 'https://gitlab.vuhdo.io/api/v4/projects/13/releases'}, {name: 'ElvUI', host: 'tukui', url: 'https://www.tukui.org/api.php?ui=elvui'}, {name: 'MDT', host: 'github', url: 'https://api.github.com/repos/Nnoggie/MythicDungeonTools/releases'}, ] var madeUpdate = false for (let addon of addons) { var release = {} try { const response = await axios.get(addon.url) if (addon.host === 'github') { var uniquePhases = {} for (const item of response.data) { release = {} release.addon = addon.name release.active = true release.phase = item.prerelease ? 
'Pre-Release' : 'Release' if (uniquePhases[addon.name + release.phase]) { continue } uniquePhases[addon.name + release.phase] = true release.url = item.url release.version = item.name release.date = item.published_at const preExisting = await AddonRelease.findOneAndUpdate({addon: release.addon, phase: release.phase, version: release.version}, release, {upsert: true, new: false}).exec() if (!preExisting) { madeUpdate = true await AddonRelease.updateMany({addon: release.addon, version: {$ne: release.version}}, {$set: {active: false}}).exec() if (release.addon === 'WeakAuras-2' && release.phase === 'Release') { await updateWAData(release, item.assets) } else if (release.addon === 'MDT' && release.phase === 'Release') { await updateMDTData(release, item) } } } } else if (addon.host === 'gitlab') { release.addon = addon.name release.active = true release.phase = 'Release' for (let i = 0; i < response.data.length; i++) { if (response.data[i].name.match(/^v[\d.-]+$/)) { release.version = response.data[i].name release.url = 'https://gitlab.vuhdo.io/vuhdo/vuhdo/-/releases' + release.version release.date = response.data[i].released_at break } } if (!release.url) { return } const preExisting = await AddonRelease.findOneAndUpdate({addon: release.addon, phase: release.phase, version: release.version}, release, {upsert: true, new: false}).exec() if (!preExisting) { madeUpdate = true await AddonRelease.updateMany({addon: release.addon, version: {$ne: release.version}}, {$set: {active: false}}).exec() } } else if (addon.host === 'tukui') { release.addon = addon.name release.active = true release.phase = 'Release' release.url = response.data.web_url release.version = response.data.version release.date = new Date(response.data.lastupdate) if (addon.name === 'ElvUI') { var classicResponse = await axios.get('https://www.tukui.org/api.php?classic-addon=2') release.classicVersion = classicResponse.data.version } const preExisting = await AddonRelease.findOneAndUpdate({addon: 
release.addon, phase: release.phase, version: release.version, classicVersion: release.classicVersion}, release, {upsert: true, new: false}).exec() if (!preExisting) { // if a new release then de-activate the previous version(s) madeUpdate = true await AddonRelease.updateMany({addon: release.addon, phase: release.phase, version: {$ne: release.version}}, {$set: {active: false}}).exec() } } } catch (e) { console.log(e) throw 'Error fetching addon ' + addon.name } } if (madeUpdate) { const Latest = await AddonRelease.find({active: true}) await SiteData.set('LatestAddons', Latest) await updateDataCaches.queue('LatestAddons') } } async function SyncElastic(table) { console.log("SYNC ELASTIC", table) return await new Promise(async (done, reject) => { let count = 0 let doc switch (table){ case 'import': const cursorImports = WagoItem.find({_userId: {$exists: true}, deleted: false, expires_at: null}).cursor() doc = await cursorImports.next() while (doc && count < 300000) { count++ elastic.addDoc('import', await doc.indexedImportData, true) doc = await cursorImports.next() if(count%500 === 0) { console.log(table, count) } } break case 'code': const cursorCode = WagoItem.find({_userId: {$exists: true}, deleted: false, expires_at: null, hasCustomCode: true}).cursor() doc = await cursorCode.next() while (doc && count < 300000) { count++ let code = await doc.indexedCodeData if (code) { elastic.addDoc('code', code, true) } doc = await cursorCode.next() if(count%500 === 0) { console.log(table, count) } } break case 'WagoItem': syncStream = WagoItem.synchronize() break case 'User': syncStream = User.synchronize() break default: return done() } }) } async function SyncMeili(table) { console.log("SYNC MEILI", table) // multi index is needed until sortStrategies or similar is added https://github.com/meilisearch/MeiliSearch/issues/730 const meiliBatchSize = 2000 switch (table){ case 'Imports:ToDo': const todoDocsWA = await redis.getJSON('meili:todo:wagoapp') || [] if 
(todoDocsWA.length) { let wagos = await WagoItem.find({_id: {$in: todoDocsWA}}) for (doc of wagos) { await meili.addDoc('weakauras', await doc.meiliWAData) } redis.setJSON('meili:todo:wagoapp', []) } break case 'Imports:Metrics': const lastIndexDate = await redis.get('meili:Metrics:Date') const wagoAppIndex = meili.index('weakauras') var metricsDocsWagoApp = [] if (!lastIndexDate) { redis.set('meili:Metrics:Date', new Date().toISOString()) } else { var cursor = WagoItem.aggregate([ {$match: {_meili: true}}, {$lookup: { from: 'wagofavorites', as: 'fave', let: {wagoID: '$_id'}, pipeline: [{ $match: { $expr: {$eq: ["$wagoID", "$$wagoID"]}, timestamp: {$gt: new Date(lastIndexDate)} }, }] }}, {$lookup: { from: 'viewsthisweeks', as: 'view', let: {wagoID: '$_id'}, pipeline: [{ $match: { $expr: {$eq: ["$wagoID", "$$wagoID"]}, viewed: {$gt: new Date(lastIndexDate)} }, }] }}, {$match: { $or: [ {"fave.0": {$exists: true}}, {"view.0": {$exists: true}} ] }} ]).cursor() for await (const doc of cursor) { let metrics = { id: doc._id, installs: doc.popularity.installed_count, stars: doc.popularity.favorite_count, views: doc.popularity.views, viewsThisWeek: doc.popularity.viewsThisWeek } // wago app if (doc._meiliWA) { metricsDocsWagoApp.push(metrics) } if (metricsDocsWagoApp.length >= meiliBatchSize) { await wagoAppIndex.updateDocuments(metricsDocsWagoApp) metricsDocsWagoApp = [] } } if (metricsDocsWagoApp.length) { await wagoAppIndex.updateDocuments(metricsDocsWagoApp) metricsDocsWagoApp = [] } redis.set('meili:Metrics:Date', new Date().toISOString()) } break case 'WagoApp': // complete DB sync var count = 0 var cursor = WagoItem.find({ type: {$regex: /WEAKAURA$/}, _userId: {$exists: true}, expires_at: null, $or: [{ hidden: false, restricted: false, private: false, encrypted: false, deleted: false, blocked: false }, { _meiliWA: true, $or: [ {hidden: true}, {restricted: true}, {private: true}, {encrypted: true}, {deleted: true}, {blocked: true} ] }] }).cursor() for (let doc = await 
cursor.next(); doc != null; doc = await cursor.next()) { count++ if (doc.hidden || doc.private || doc.encrypted || doc.restricted || doc.deleted || doc.blocked) { await meili.removeDoc('weakauras', await doc._id) doc._meiliWA = false await doc.save() } else { await meili.addDoc('weakauras', await doc.meiliWAData, true) if (!doc._meiliWA) { doc._meiliWA = true await doc.save() } } if (count%1000 == 0) { console.log('sync meili', count) } } break default: return } } const codeProcessVersion = ENUM.PROCESS_VERSION.WAGO async function CodeReview(customCode, doc) { try { let lc = await luacheck.run(customCode, doc.game) if (lc) { customCode = lc } } catch (e) { console.log('luacheck error', doc._id, e) } try { let metrics = await codeMetrics.run(customCode) if (metrics) { customCode = metrics } } catch (e) { console.log('codeMetrics error', doc._id, e) } return customCode } function TableReview (obj, data) { if (!data) { data = { dependencies: new Set() } } for (const [k, v] of Object.entries(obj)) { if (v && typeof v === 'object') { data = TableReview(v, data) } else if (typeof v === 'string' && (k === 'texture' || (k === 'sound' && (obj.do_sound || obj.sound_type === 'Play')))) { let s = v.replace(/\\{1,2}/g, '/') let m = s.match(/^Interface\/AddOns\/([^\/]+)\//i) if (m) { data.dependencies.add(m[1]) } } } return data } async function ProcessCode(data) { if (!data.id) return var doc = await WagoItem.lookup(data.id) var code = await WagoCode.lookup(data.id, data.version) if (!doc || !code || !code._id || doc.encrypted) { return } if (data.addon && Addons[data.addon]) { const addon = Addons[data.addon] if (addon && addon.addWagoData) { let meta = addon.addWagoData && addon.addWagoData(code, doc) if ((meta && meta.encode) || data.encode || !code.encoded) { code.encoded = await addon.encode(code.json.replace(/\\/g, '\\\\').replace(/"/g, '\\"').trim(), lua.runLua) } if (meta && meta.wago) { doc = meta.wago } } } else if (doc.type) { // match addon by type for (const addon 
of Object.values(Addons)) { if (doc.type.match(addon.typeMatch)) { let meta = addon.addWagoData && addon.addWagoData(code, doc) if ((meta && meta.encode) || data.encode || !code.encoded) { code.encoded = await addon.encode(code.json.replace(/\\/g, '\\\\').replace(/"/g, '\\"').trim(), lua.runLua) } if (meta && meta.wago) { doc = meta.wago } } } } let err try { switch (doc.type) { case 'SNIPPET': code.customCode = await CodeReview([{id: 'Lua', name: 'Snippet', lua: code.lua}], doc) break case 'WEAKAURA': case 'CLASSIC-WEAKAURA': case 'TBC-WEAKAURA': case 'PLATER': var json = JSON.parse(code.json) code.customCode = await CodeReview(getCode(json, doc.type), doc) const tableMetrics = TableReview(json) tableMetrics.dependencies = [...tableMetrics.dependencies] code.tableMetrics = tableMetrics break } } catch (e) { console.log(data, e) err = true } if (err) throw 'Code Processing Error' doc.blocked = false if (code.version > 1) { await WagoCode.updateMany({auraID: doc._id, _id: {$ne: code._id}}, {$set: {isLatestVersion: false}}) } code.isLatestVersion = true if (code.customCode && code.customCode.length) { doc.hasCustomCode = true code.customCode.forEach(c => { if (c.luacheck && c.luacheck.match(commonRegex.WeakAuraBlacklist)) { doc.blocked = true } }) } doc.codeProcessVersion = codeProcessVersion await doc.save() await code.save() if (doc._userId && !doc.deleted && !doc.expires_at) { elastic.addDoc('import', await doc.indexedImportData) } if (code.customCode.length) { return doc } return null } async function ProcessAllCode() { // return var cursor = WagoItem.find({ deleted: false, _userId: {$exists: true}, codeProcessVersion: {$lt: codeProcessVersion}, type: {$in: ['WEAKAURA', 'CLASSIC-WEAKAURA', 'TBC-WEAKAURA', 'PLATER']}, modified : { $gte: new Date(new Date().setDate(new Date().getDate()-180)) } }).cursor({batchSize:50}) let count = 0 console.log('-------------- CODE SYNC START ----------------') for await (const doc of cursor) { count++ if (doc.deleted) { await 
elastic.removeDoc('import', doc._id) } else { await ProcessCode({id: doc._id, type: doc.type}) if (count%1000 == 0) { console.log('process code', count) } } } console.log('-------------- CODE SYNC FINISHED ----------------') } function sortJSON(obj) { // if a regular array then its already sorted but still sort any child objects if (Array.isArray(obj)) { for (let i = 0; i < obj.length; i++) { if (obj[i] && typeof obj[i] == 'object') { obj[i] = sortJSON(obj[i]) } } return obj } // sort object as expected var sorted = {} var keys keys = Object.keys(obj) keys.sort(function(key1, key2) { if(key1 < key2) return -1 if(key1 > key2) return 1 return 0 }) for (var i in keys) { var key = keys[i] if (obj[key] && typeof obj[key] == 'object') { sorted[key] = sortJSON(obj[key]) } else { sorted[key] = obj[key] } } return sorted } async function updateWAData (release, assets) { const addonDir = path.resolve(__dirname, '../lua', 'addons' ,'WeakAuras', release.version) await mkdirp(addonDir) const zipFile = path.resolve(addonDir, 'WeakAuras.zip') const writer = require('fs').createWriteStream(zipFile) var axiosDownload = {method: 'GET', responseType: 'stream'} for (let i = 0; i < assets.length; i++) { if (assets[i].name.match(/WeakAuras-[\d.]+\.zip/)) { axiosDownload.url = assets[i].browser_download_url break } } if (!axiosDownload.url) { logError(e, 'Unable to find WeakAura download') return false } const response = await axios(axiosDownload) response.data.pipe(writer) await new Promise((resolve, reject) => { writer.on('finish', resolve) writer.on('error', reject) }) await decompress(zipFile, addonDir) const waLua = await fs.readFile(addonDir + '/WeakAuras/WeakAuras.lua', 'utf8') const versionMatch = waLua.match(/internalVersion\s?=\s?(\d+)/) if (versionMatch && versionMatch[1]) { const internalVersion = parseInt(versionMatch[1]) if (internalVersion) { SiteData.set('weakAuraInternalVersion', internalVersion) return } } // if we get here then internalVersion is not found or is not an 
integer logError(e, 'Unable to find WeakAura internalVersion') } async function updateMDTData (release, assets) { if (!assets.zipball_url) { logError('Unable to find MDT download', assets) return false } const addonDir = path.resolve(__dirname, '../lua', 'addons' ,'MDT', release.version) await mkdirp(addonDir) const zipFile = path.resolve(addonDir, 'MDT.zip') const writer = require('fs').createWriteStream(zipFile) var axiosDownload = {method: 'GET', responseType: 'stream', url: assets.zipball_url} const response = await axios(axiosDownload) response.data.pipe(writer) await new Promise((resolve, reject) => { writer.on('finish', resolve) writer.on('error', reject) }) await decompress(zipFile, addonDir) // get commit directory const commit = await fs.readdir(addonDir) var mdtData = await lua.BuildMDT_DungeonTable(`${addonDir}/${commit[1]}`) mdtData = JSON.parse(mdtData) // calculate dimensions mdtData.dungeonDimensions = [] mdtData.dungeonEnemies.forEach((enemies, mapID) => { // console.log(mapID) mdtData.dungeonDimensions.push({maxX: -9999999, minX: 9999999, maxY: -9999999, minY: 9999999}) if (!enemies) return enemies.forEach((creature) => { if (!creature || !creature.clones) return creature.clones.forEach((clone) => { if (!clone) { return } mdtData.dungeonDimensions[mapID].maxX = Math.max(mdtData.dungeonDimensions[mapID].maxX, clone.x) mdtData.dungeonDimensions[mapID].minX = Math.min(mdtData.dungeonDimensions[mapID].minX, clone.x) mdtData.dungeonDimensions[mapID].maxY = Math.max(mdtData.dungeonDimensions[mapID].maxY, clone.y) mdtData.dungeonDimensions[mapID].minY = Math.min(mdtData.dungeonDimensions[mapID].minY, clone.y) }) }) }) // save core data plus for each dungeon await SiteData.findByIdAndUpdate('mdtDungeonTable', {value: mdtData}, {upsert: true}).exec() await SiteData.findByIdAndUpdate('mdtAffixWeeks', {value: mdtData.affixWeeks}, {upsert: true}).exec() await cloudflare.zones.purgeCache(config.cloudflare.zoneID, {files: 
['https://data.wago.io/data/mdtDungeonTable', 'https://data.wago.io/data/mdtAffixWeeks']}) for (let mapID = 0; mapID < mdtData.dungeonEnemies.length; mapID++) { let Obj = { affixWeeks: mdtData.affixWeeks, dungeonEnemies: mdtData.dungeonEnemies[mapID], enemyHash: md5(JSON.stringify(mdtData.dungeonEnemies[mapID])), mapPOIs: mdtData.mapPOIs[mapID], mapInfo: mdtData.mapInfo[mapID], dungeonTotalCount: mdtData.dungeonTotalCount[mapID], scaleMultiplier: mdtData.scaleMultiplier[mapID], dungeonSubLevels: mdtData.dungeonSubLevels[mapID], dungeonMaps: mdtData.dungeonMaps[mapID], dungeonDimensions: mdtData.dungeonDimensions[mapID] } if (mapID === 15) { Obj.freeholdCrews = mdtData.freeholdCrews } const currentHash = await SiteData.findById('mdtDungeonTable-' + mapID).exec() await SiteData.findByIdAndUpdate('mdtDungeonTable-' + mapID, {value: Obj}, {upsert: true}).exec() await cloudflare.zones.purgeCache(config.cloudflare.zoneID, {files: ['https://data.wago.io/data/mdtDungeonTable-' + mapID]}) // currentHash.value.enemyHash = null // force regenerate // if new portrait maps are required if ((!currentHash || currentHash.value.enemyHash !== Obj.enemyHash) && Obj.dungeonMaps && Obj.dungeonEnemies && Obj.dungeonEnemies.length) { try { console.log('make portrait map', mapID) for (let subMapID = 1; subMapID <= Object.keys(Obj.dungeonMaps).length; subMapID++) { if (mapID === 18) { await buildStaticMDTPortraits(Obj, mapID, subMapID, false, 1) await buildStaticMDTPortraits(Obj, mapID, subMapID, false, 2) await buildStaticMDTPortraits(Obj, mapID, subMapID, true, 1) await buildStaticMDTPortraits(Obj, mapID, subMapID, true, 2) } else { await buildStaticMDTPortraits(Obj, mapID, subMapID, false) await buildStaticMDTPortraits(Obj, mapID, subMapID, true) } break } logger({e_c: 'Generate MDT portrait maps', e_a: Obj.dungeonMaps['0'], e_n: Obj.dungeonMaps['0']}) } catch (e) { logError(e, 'Generating MDT portrait maps ' + Obj.dungeonMaps['0']) } } } return } async function 
buildStaticMDTPortraits(json, mapID, subMapID, teeming, faction) { // this is very finicky so only run it locally to generate the images if (config.env !== 'development') { return } const puppeteer = require('puppeteer-firefox') const mdtScale = 539 / 450 if (teeming) teeming = '-Teeming' else teeming = '' var imgName = `portraitMap-${mapID}-${subMapID}${teeming}` if (faction) { imgName = imgName + '-Faction' + faction } console.log('make map for', json.dungeonMaps["0"], imgName) var html = `<!DOCTYPE html> <html> <head> <script src="https://unpkg.com/[email protected]/konva.min.js"></script> <meta charset="utf-8"> <title>Konva Circle Demo</title> <style> body { margin: 0; padding: 0; overflow: hidden; background-color: #F0F0F0; } </style> </head> <body> <div id="container"></div> <script> var multiplier = 5 var stage = new Konva.Stage({ container: 'container', width: 1024 * multiplier, height: 768 * multiplier }); var layer = new Konva.Layer(); var enemyPortraits = new Image() enemyPortraits.src = 'https://wago.io/mdt/portraits-${mapID}.png?' enemyPortraits.crossOrigin = 'Anonymous' enemyPortraits.onload = () => { console.log(enemyPortraits.src, 'loaded') ` json.dungeonEnemies.forEach((creature, i) => { if (!creature || !creature.clones) return creature.clones.forEach((clone, j) => { if (clone && (!clone.sublevel || clone.sublevel === subMapID) && (!clone.teeming || (clone.teeming && teeming)) && (!clone.faction || (clone.faction === faction))) { html = html + ` var circle${i}_${j} = new Konva.Circle({ x: ${clone.x * mdtScale} * multiplier, y: ${clone.y * -mdtScale} * multiplier, radius: ${Math.round(5 * creature.scale * (creature.isBoss ? 1.7 : 1) * (json.scaleMultiplier || 1)) / mdtScale} * multiplier, fillPatternX: ${(-Math.round(5 * creature.scale * (creature.isBoss ? 1.7 : 1))) / mdtScale} * multiplier, fillPatternY: ${(-Math.round(5 * creature.scale * (creature.isBoss ? 
1.7 : 1))) / mdtScale} * multiplier, fillPatternImage: enemyPortraits, fillPatternOffset: ${getEnemyPortraitOffset(json.dungeonEnemies.length, i, 115)}, fillPatternRepeat: 'no-repeat', fillPatternScaleX: ${Math.round(5 * creature.scale * (creature.isBoss ? 1.7 : 1)) / 64} * multiplier, fillPatternScaleY: ${Math.round(5 * creature.scale * (creature.isBoss ? 1.7 : 1)) / 64} * multiplier, stroke: '${creature.isBoss ? 'gold' : 'black'}', strokeWidth: .5 * multiplier }) // add the shape to the layer layer.add(circle${i}_${j});` } }) return }) html = html + ` stage.add(layer); setTimeout(() => { var img = document.createElement('img') img.src = stage.toDataURL() img.id = 'img' document.body.appendChild(img) document.getElementById('container').remove() }, 1000) } </script> </body> </html>` // await fs.writeFile('../test.html', html, 'utf8') console.log('launch puppeteer') const browser = await puppeteer.launch({args: [ '--disable-web-security', // ignore cors errors ]}) const page = await browser.newPage() await page.setCacheEnabled(false) await page.setContent(html) page.on('console', msg => { for (let i = 0; i < msg.args().length; ++i) console.log(`${i}: ${msg.args()[i]}`); }); await page.waitForSelector('img', {timeout: 120000}) const base64 = await page.evaluate(() => { return document.getElementById('img').src }) await browser.close() const buffer = Buffer.from(base64.replace(/^data:image\/\w+;base64,/, ""), 'base64') await image.saveMdtPortraitMap(buffer, imgName) return } function getEnemyPortraitOffset (numCreatures, creatureIndex, size) { var row = 0 size = size || 36 if (creatureIndex >= Math.ceil(numCreatures / 2)) { row++ } var o = {x: ((creatureIndex) - (Math.ceil(numCreatures / 2) * row)) * size, y: row * size} return `{x: ${o.x}, y: ${o.y}}` } /* UpdateTwitchSubs: async (req) => { const users = await User.find({"roles.pro_subscriber": true, "twitch.refreshToken": {$exists: true}}).exec() users.forEach(async (user) => { try { var refresh = await 
axios.post('https://id.twitch.tv/oauth2/token', querystring.stringify({ client_id: config.auth.twitch.clientID, client_secret: config.auth.twitch.clientSecret, grant_type: 'refresh_token', refresh_token: user.twitch.refreshToken }), { headers: { "Content-Type": "application/x-www-form-urlencoded" } }) var validate = await axios.get('https://id.twitch.tv/oauth2/validate', {headers: {Authorization: 'OAuth ' + refresh.data.access_token }}) if (validate.data.user_id === user.twitch.id) { user.twitch.refreshToken = refresh.data.refresh_token await user.save() var subs = await axios.get('https://api.twitch.tv/helix/subscriptions', {params: {broadcaster_id: user.twitch.id}, headers: {Authorization: 'Bearer ' + refresh.data.access_token }}) if (subs.data.data) { var currentSubs = [] subs.data.data.forEach(async (subscriber) => { var subUser = await User.findOne({"twitch.id": subscriber.user_id}).exec() if (subUser) { currentSubs.push(subUser._id) if (subUser.twitch.subscribedTo.indexOf(user.twitch.id) === -1) { subUser.twitch.subscribedTo.push(user.twitch.id) await subUser.save() } } }) // remove old subs if (currentSubs.length) { var exSubs = await User.find({"twitch.subscribedTo": user.twitch.id, _id: {$nin: currentSubs}}).exec() exSubs.forEach(async (exSubscriber) => { var i = exSubscriber.twitch.subscribedTo.indexOf(user.twitch.id) exSubscriber.twitch.subscribedTo.splice(i, 1) await exSubscriber.save() }) } } } else { // no longer allowing link between wago and twitch user.twitch.refreshToken = null await user.save() } } catch (e) { if (e.response && e.response.status === 401 && user) { user.twitch.refreshToken = null await user.save() } req.trackError(e, 'Cron: UpdateTwitchSubs') } }) } } */ /* // TODO stats page function generateStats(res) { const startDate = new Date(1463788800000) // May 21 2016 async.series({ WeakAuras: (done) => { Stats.findOne({name: 'Total WeakAuras'}).sort({date: -1}).then((stat) => { let date let today = new Date() if (!stat) { date = 
startDate } else if (stat.date.nextWeek() < today) { date = stat.date.nextWeek() } else { // up to date already return done() } while (date < today) { let dDate = new Date(date) WagoItem.count({type: "WEAKAURAS2", created: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((num) => { Stats.findOneAndUpdate({name: 'Total WeakAuras', date: dDate}, {name: 'Total WeakAuras', date: dDate, value: num}, {upsert: true}).exec() }) date = date.nextWeek() } done() }) }, ElvUI: (done) => { Stats.findOne({name: 'Total ElvUI'}).sort({date: -1}).then((stat) => { let date let today = new Date() if (!stat) { date = startDate } else if (stat.date.nextWeek() < today) { date = stat.date.nextWeek() } else { // up to date already return done() } while (date < today) { let dDate = new Date(date) WagoItem.count({type: "ELVUI", created: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((num) => { Stats.findOneAndUpdate({name: 'Total ElvUI', date: dDate}, {name: 'Total ElvUI', date: dDate, value: num}, {upsert: true}).exec() }) date = date.nextWeek() } done() }) }, MDT: (done) => { Stats.findOne({name: 'Total MDT Routes'}).sort({date: -1}).then((stat) => { let date let today = new Date() if (!stat) { date = startDate } else if (stat.date.nextWeek() < today) { date = stat.date.nextWeek() } else { // up to date already return done() } while (date < today) { let dDate = new Date(date) WagoItem.count({type: "MDT", created: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((num) => { Stats.findOneAndUpdate({name: 'Total MDT Routes', date: dDate}, {name: 'Total MDT Routes', date: dDate, value: num}, {upsert: true}).exec() }) date = date.nextWeek() } done() }) }, TotalRP: (done) => { Stats.findOne({name: 'Total TotalRP'}).sort({date: -1}).then((stat) => { let date let today = new Date() if (!stat) { date = startDate } else if (stat.date.nextWeek() < today) { date = stat.date.nextWeek() } else { // up to date already return done() } while (date < today) { let dDate = new Date(date) WagoItem.count({type: 
"TOTALRP3", created: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((num) => { Stats.findOneAndUpdate({name: 'Total TotalRP', date: dDate}, {name: 'Total TotalRP', date: dDate, value: num}, {upsert: true}).exec() }) date = date.nextWeek() } done() }) }, VuhDo: (done) => { Stats.findOne({name: 'Total VuhDo'}).sort({date: -1}).then((stat) => { let date let today = new Date() if (!stat) { date = startDate } else if (stat.date.nextWeek() < today) { date = stat.date.nextWeek() } else { // up to date already return done() } while (date < today) { let dDate = new Date(date) WagoItem.count({type: "VUHDO", created: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((num) => { Stats.findOneAndUpdate({name: 'Total VuhDo', date: dDate}, {name: 'Total VuhDo', date: dDate, value: num}, {upsert: true}).exec() }) date = date.nextWeek() } done() }) }, Companion: (done) => { let date = startDate let today = new Date while (date < today) { let dDate = new Date(date) WagoFavorites.distinct('appID', {type: 'Install', timestamp: {"$gte": dDate, "$lt": dDate.nextWeek()}}).then((IDs) => { var num = IDs.length Stats.findOneAndUpdate({name: 'WeakAura Companion Installs', date: dDate}, {name: 'WeakAura Companion Installs', date: dDate, value: num}, {upsert: true}).exec() }) date = date.nextWeek() } done() }, WACode: (done) => { Stats.findOne({name: 'WeakAura Region group'}).sort({date: -1}).then((stat) => { let date let today = new Date() if (!stat) { date = startDate } else if (stat.date.nextWeek() < today) { date = stat.date.nextWeek() } else { // up to date already return done() } async.whilst( () => { return date < today }, (cb) => { let dDate = new Date(date) let countAuthorOptions = 0 let countBuffTrigger2 = 0 let countTriggers = 0 let countTriggerCustomCode = 0 let countTriggerCustomCodeEveryFrame = 0 let countRegionTypes = {group:0, dynamicgroup:0, aurabar:0, icon:0, text:0, model:0, model:0, texture:0, progresstexture:0, stopmotion:0} WagoCode.find({updated: {"$gte": dDate, "$lt": 
dDate.nextWeek()}}).then((wa) => { async.forEach(wa, (code, next) => { // confirm import is a weakaura WagoItem.findOne({_id: code.auraID, type: 'WEAKAURA'}).then((aura) => { if (aura) { var json = JSON.parse(code.json) if (!json.d) { return next() } if (!json.c) { json.c = [json.d] } else { countRegionTypes[json.d.regionType]++ } for (let i = 0; i < json.c.length; i++) { if (!json.c[i]) continue countRegionTypes[json.c[i].regionType]++ // author options feature if (json.c[i].authorOptions && json.c[i].authorOptions.length) { countAuthorOptions++ } // bufftrigger2 feature if (json.c[i].triggers && json.c[i].triggers['1']) { countBuffTrigger2++ } // count triggers with custom code if (json.c[i].trigger) { countTriggers++ if (json.c[i].trigger.type === 'custom' && json.c[i].trigger.custom) { countTriggerCustomCode++ if (json.c[i].trigger.check === 'update') { countTriggerCustomCodeEveryFrame++ } } } if (json.c[i].additional_triggers && json.c[i].additional_triggers.length) { for (let k = 0; k < json.c[i].additional_triggers.length; k++) { countTriggers++ if (json.c[i].additional_triggers[k].type === 'custom' && json.c[i].additional_triggers[k].custom) { countTriggerCustomCode++ if (json.c[i].trigger.check === 'update') { countTriggerCustomCodeEveryFrame++ } } } } if (json.c[i].triggers) { for (var k in json.c[i].triggers) { if (parseInt(k) && json.c[i].triggers[k].trigger) { countTriggers++ if (json.c[i].triggers[k].trigger.type === 'custom' && json.c[i].triggers[k].trigger.custom) { countTriggerCustomCode++ if (json.c[i].triggers[k].trigger.check === 'update') { countTriggerCustomCodeEveryFrame++ } } } } } } } next() }) }, () => { Stats.findOneAndUpdate({name: 'WeakAura Imports with Author Options Feature', date: dDate}, {name: 'WeakAura Imports with Author Options Feature', date: dDate, value: countAuthorOptions}, {upsert: true}).exec() Stats.findOneAndUpdate({name: 'WeakAura Imports with BuffTrigger2 Feature', date: dDate}, {name: 'WeakAura Imports with 
BuffTrigger2 Feature', date: dDate, value: countBuffTrigger2}, {upsert: true}).exec() // Stats.findOneAndUpdate({name: 'WeakAura Triggers', date: dDate}, {name: 'WeakAura Triggers', date: dDate, value: countTriggers}, {upsert: true}).exec() Stats.findOneAndUpdate({name: 'WeakAura Triggers with Custom Code', date: dDate}, {name: 'WeakAura Triggers with Custom Code', date: dDate, value: countTriggerCustomCode}, {upsert: true}).exec() Stats.findOneAndUpdate({name: 'WeakAura Triggers with Custom Code Updating Every Frame', date: dDate}, {name: 'WeakAura Triggers with Custom Code Updating Every Frame', date: dDate, value: countTriggerCustomCodeEveryFrame}, {upsert: true}).exec() Object.keys(countRegionTypes).forEach((region) => { if (region && region !== 'undefined') { console.log(region, countRegionTypes[region]) Stats.findOneAndUpdate({name: 'WeakAura Region ' + region, date: dDate}, {name: 'WeakAura Region ' + region, date: dDate, value: countRegionTypes[region]}, {upsert: true}).exec() } }) date = date.nextWeek() cb() }) }) }, () => { done() }) }) } }, () => { res.send({done: true}) }) } Date.prototype.nextWeek = function() { var date = new Date(this.valueOf()) date.setDate(date.getDate() + 7) return date } */
fix elastic sync
backend/api/helpers/tasks.js
fix elastic sync
<ide><path>ackend/api/helpers/tasks.js <ide> let doc <ide> switch (table){ <ide> case 'import': <del> const cursorImports = WagoItem.find({_userId: {$exists: true}, deleted: false, expires_at: null}).cursor() <add> const cursorImports = WagoItem.find({_userId: {$exists: true}, expires_at: null}).cursor() <ide> doc = await cursorImports.next() <del> while (doc && count < 300000) { <add> while (doc) { <ide> count++ <add> if (doc.deleted) { <add> elastic.removeDoc('import', await doc._id) <add> } <add> else { <ide> elastic.addDoc('import', await doc.indexedImportData, true) <add> } <ide> doc = await cursorImports.next() <ide> if(count%500 === 0) { <ide> console.log(table, count) <ide> break <ide> <ide> case 'code': <del> const cursorCode = WagoItem.find({_userId: {$exists: true}, deleted: false, expires_at: null, hasCustomCode: true}).cursor() <add> const cursorCode = WagoItem.find({_userId: {$exists: true}, expires_at: null, hasCustomCode: true}).cursor() <ide> doc = await cursorCode.next() <ide> while (doc && count < 300000) { <ide> count++ <ide> let code = await doc.indexedCodeData <ide> if (code) { <add> if (doc.deleted) { <add> elastic.removeDoc('code', await doc._id) <add> } <add> else { <ide> elastic.addDoc('code', code, true) <add> } <add> } <add> else { <add> elastic.removeDoc('code', await doc._id) <ide> } <ide> doc = await cursorCode.next() <ide> if(count%500 === 0) {
Java
bsd-2-clause
4f1eaa9caf162f71cbde4e06986cfd02b1a05a2c
0
chris-watson/chpl-api,chris-watson/chpl-api
package gov.healthit.chpl.web.controller; import java.util.ArrayList; import java.util.List; import java.util.Set; import org.apache.log4j.LogManager; import org.apache.log4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.MediaType; import org.springframework.security.acls.domain.PrincipalSid; import org.springframework.security.acls.model.Permission; import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.RestController; import com.fasterxml.jackson.core.JsonProcessingException; import gov.healthit.chpl.auth.dto.UserDTO; import gov.healthit.chpl.auth.dto.UserPermissionDTO; import gov.healthit.chpl.auth.json.User; import gov.healthit.chpl.auth.manager.UserManager; import gov.healthit.chpl.auth.user.UserRetrievalException; import gov.healthit.chpl.dao.EntityCreationException; import gov.healthit.chpl.dao.EntityRetrievalException; import gov.healthit.chpl.domain.ChplPermission; import gov.healthit.chpl.domain.PermittedUser; import gov.healthit.chpl.domain.TestingLab; import gov.healthit.chpl.domain.UpdateUserAndAtlRequest; import gov.healthit.chpl.dto.AddressDTO; import gov.healthit.chpl.dto.TestingLabDTO; import gov.healthit.chpl.manager.TestingLabManager; import gov.healthit.chpl.web.controller.results.PermittedUserResults; import gov.healthit.chpl.web.controller.results.TestingLabResults; import io.swagger.annotations.Api; @Api(value="atls") @RestController @RequestMapping("/atls") public class TestingLabController { @Autowired TestingLabManager atlManager; @Autowired UserManager userManager; private static final 
Logger logger = LogManager.getLogger(TestingLabController.class); @RequestMapping(value="/", method=RequestMethod.GET, produces="application/json; charset=utf-8") public @ResponseBody TestingLabResults getAtls(@RequestParam(required=false, defaultValue="false") boolean editable) { TestingLabResults results = new TestingLabResults(); List<TestingLabDTO> atls = null; if(editable) { atls = atlManager.getAllForUser(); } else { atls = atlManager.getAll(); } if(atls != null) { for(TestingLabDTO atl : atls) { results.getAtls().add(new TestingLab(atl)); } } return results; } @RequestMapping(value="/{atlId}", method=RequestMethod.GET, produces="application/json; charset=utf-8") public @ResponseBody TestingLab getAtlById(@PathVariable("atlId") Long atlId) throws EntityRetrievalException { TestingLabDTO atl = atlManager.getById(atlId); return new TestingLab(atl); } @RequestMapping(value="/create", method= RequestMethod.POST, consumes= MediaType.APPLICATION_JSON_VALUE, produces="application/json; charset=utf-8") public TestingLab create(@RequestBody TestingLab atlInfo) throws InvalidArgumentsException, UserRetrievalException, EntityRetrievalException, EntityCreationException, JsonProcessingException { TestingLabDTO toCreate = new TestingLabDTO(); toCreate.setTestingLabCode(atlInfo.getAtlCode()); toCreate.setAccredidationNumber(atlInfo.getAccredidationNumber()); if(StringUtils.isEmpty(atlInfo.getName())) { throw new InvalidArgumentsException("A name is required for a testing lab"); } toCreate.setName(atlInfo.getName()); toCreate.setWebsite(atlInfo.getWebsite()); if(atlInfo.getAddress() == null) { throw new InvalidArgumentsException("An address is required for a new testing lab"); } AddressDTO address = new AddressDTO(); address.setId(atlInfo.getAddress().getAddressId()); address.setStreetLineOne(atlInfo.getAddress().getLine1()); address.setStreetLineTwo(atlInfo.getAddress().getLine2()); address.setCity(atlInfo.getAddress().getCity()); 
address.setState(atlInfo.getAddress().getState()); address.setZipcode(atlInfo.getAddress().getZipcode()); address.setCountry(atlInfo.getAddress().getCountry()); toCreate.setAddress(address); toCreate = atlManager.create(toCreate); return new TestingLab(toCreate); } @RequestMapping(value="/update", method= RequestMethod.POST, consumes= MediaType.APPLICATION_JSON_VALUE, produces="application/json; charset=utf-8") public TestingLab update(@RequestBody TestingLab atlInfo) throws InvalidArgumentsException, EntityRetrievalException, JsonProcessingException, EntityCreationException { TestingLabDTO toUpdate = new TestingLabDTO(); toUpdate.setId(atlInfo.getId()); toUpdate.setTestingLabCode(atlInfo.getAtlCode()); toUpdate.setAccredidationNumber(atlInfo.getAccredidationNumber()); if(StringUtils.isEmpty(atlInfo.getName())) { throw new InvalidArgumentsException("A name is required for a testing lab"); } toUpdate.setName(atlInfo.getName()); toUpdate.setWebsite(atlInfo.getWebsite()); if(atlInfo.getAddress() == null) { throw new InvalidArgumentsException("An address is required to update the testing lab"); } AddressDTO address = new AddressDTO(); address.setId(atlInfo.getAddress().getAddressId()); address.setStreetLineOne(atlInfo.getAddress().getLine1()); address.setStreetLineTwo(atlInfo.getAddress().getLine2()); address.setCity(atlInfo.getAddress().getCity()); address.setState(atlInfo.getAddress().getState()); address.setZipcode(atlInfo.getAddress().getZipcode()); address.setCountry(atlInfo.getAddress().getCountry()); toUpdate.setAddress(address); TestingLabDTO result = atlManager.update(toUpdate); return new TestingLab(result); } @RequestMapping(value="/{atlId}/delete", method= RequestMethod.POST, produces="application/json; charset=utf-8") public String deleteAtl(@PathVariable("atlId") Long atlId) throws JsonProcessingException, EntityCreationException, EntityRetrievalException, UserRetrievalException { TestingLabDTO toDelete = atlManager.getById(atlId); 
atlManager.delete(toDelete); return "{\"deletedAtl\" : true }"; } @RequestMapping(value="/add_user", method= RequestMethod.POST, consumes= MediaType.APPLICATION_JSON_VALUE, produces="application/json; charset=utf-8") public String addUserToAtl(@RequestBody UpdateUserAndAtlRequest updateRequest) throws UserRetrievalException, EntityRetrievalException, InvalidArgumentsException { if(updateRequest.getAtlId() == null || updateRequest.getUserId() == null || updateRequest.getUserId() <= 0 || updateRequest.getAuthority() == null) { throw new InvalidArgumentsException("ATL ID, User ID (greater than 0), and Authority are required."); } UserDTO user = userManager.getById(updateRequest.getUserId()); TestingLabDTO atl = atlManager.getById(updateRequest.getAtlId()); if(user == null || atl == null) { throw new InvalidArgumentsException("Could not find either ATL or User specified"); } Permission permission = ChplPermission.toPermission(updateRequest.getAuthority()); atlManager.addPermission(atl, updateRequest.getUserId(), permission); return "{\"userAdded\" : true }"; } @RequestMapping(value="{atlId}/remove_user/{userId}", method= RequestMethod.POST, consumes= MediaType.APPLICATION_JSON_VALUE, produces="application/json; charset=utf-8") public String deleteUserFromAtl(@PathVariable Long atlId, @PathVariable Long userId) throws UserRetrievalException, EntityRetrievalException, InvalidArgumentsException{ UserDTO user = userManager.getById(userId); TestingLabDTO atl = atlManager.getById(atlId); if(user == null || atl == null) { throw new InvalidArgumentsException("Could not find either ATL or User specified"); } //delete all permissions on that atl atlManager.deleteAllPermissionsOnAtl(atl, new PrincipalSid(user.getSubjectName())); return "{\"userDeleted\" : true }"; } @RequestMapping(value="/{atlId}/users", method=RequestMethod.GET, produces="application/json; charset=utf-8") public @ResponseBody PermittedUserResults getUsers(@PathVariable("atlId") Long atlId) throws 
InvalidArgumentsException, EntityRetrievalException { TestingLabDTO atl = atlManager.getById(atlId); if(atl == null) { throw new InvalidArgumentsException("Could not find the ATL specified."); } List<PermittedUser> atlUsers = new ArrayList<PermittedUser>(); List<UserDTO> users = atlManager.getAllUsersOnAtl(atl); for(UserDTO user : users) { //only show users that have ROLE_ATL_* Set<UserPermissionDTO> systemPermissions = userManager.getGrantedPermissionsForUser(user); boolean hasAtlPermission = false; for(UserPermissionDTO systemPermission : systemPermissions) { if(systemPermission.getAuthority().startsWith("ROLE_ATL_")) { hasAtlPermission = true; } } if(hasAtlPermission) { List<String> roleNames = new ArrayList<String>(); for(UserPermissionDTO role : systemPermissions) { roleNames.add(role.getAuthority()); } List<Permission> permissions = atlManager.getPermissionsForUser(atl, new PrincipalSid(user.getSubjectName())); List<String> atlPerm = new ArrayList<String>(permissions.size()); for(Permission permission : permissions) { ChplPermission perm = ChplPermission.fromPermission(permission); if(perm != null) { atlPerm.add(perm.toString()); } } PermittedUser userInfo = new PermittedUser(); userInfo.setUser(new User(user)); userInfo.setPermissions(atlPerm); userInfo.setRoles(roleNames); atlUsers.add(userInfo); } } PermittedUserResults results = new PermittedUserResults(); results.setUsers(atlUsers); return results; } }
chpl/chpl-service/src/main/java/gov/healthit/chpl/web/controller/TestingLabController.java
package gov.healthit.chpl.web.controller; import java.util.ArrayList; import java.util.List; import java.util.Set; import org.apache.log4j.LogManager; import org.apache.log4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.MediaType; import org.springframework.security.acls.domain.PrincipalSid; import org.springframework.security.acls.model.Permission; import org.springframework.util.StringUtils; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.RestController; import com.fasterxml.jackson.core.JsonProcessingException; import gov.healthit.chpl.auth.dto.UserDTO; import gov.healthit.chpl.auth.dto.UserPermissionDTO; import gov.healthit.chpl.auth.json.User; import gov.healthit.chpl.auth.manager.UserManager; import gov.healthit.chpl.auth.user.UserRetrievalException; import gov.healthit.chpl.dao.EntityCreationException; import gov.healthit.chpl.dao.EntityRetrievalException; import gov.healthit.chpl.domain.ChplPermission; import gov.healthit.chpl.domain.PermittedUser; import gov.healthit.chpl.domain.TestingLab; import gov.healthit.chpl.domain.UpdateUserAndAtlRequest; import gov.healthit.chpl.dto.AddressDTO; import gov.healthit.chpl.dto.TestingLabDTO; import gov.healthit.chpl.manager.TestingLabManager; import gov.healthit.chpl.web.controller.results.PermittedUserResults; import gov.healthit.chpl.web.controller.results.TestingLabResults; import io.swagger.annotations.Api; @Api(value="atls") @RestController @RequestMapping("/atls") public class TestingLabController { @Autowired TestingLabManager atlManager; @Autowired UserManager userManager; private static final 
Logger logger = LogManager.getLogger(TestingLabController.class); @RequestMapping(value="/", method=RequestMethod.GET, produces="application/json; charset=utf-8") public @ResponseBody TestingLabResults getAtls(@RequestParam(required=false, defaultValue="false") boolean editable) { TestingLabResults results = new TestingLabResults(); List<TestingLabDTO> atls = null; if(editable) { atls = atlManager.getAllForUser(); } else { atls = atlManager.getAll(); } if(atls != null) { for(TestingLabDTO atl : atls) { results.getAtls().add(new TestingLab(atl)); } } return results; } @RequestMapping(value="/{atlId}", method=RequestMethod.GET, produces="application/json; charset=utf-8") public @ResponseBody TestingLab getAtlById(@PathVariable("atlId") Long atlId) throws EntityRetrievalException { TestingLabDTO atl = atlManager.getById(atlId); return new TestingLab(atl); } @RequestMapping(value="/create", method= RequestMethod.POST, consumes= MediaType.APPLICATION_JSON_VALUE, produces="application/json; charset=utf-8") public TestingLab create(@RequestBody TestingLab atlInfo) throws InvalidArgumentsException, UserRetrievalException, EntityRetrievalException, EntityCreationException, JsonProcessingException { TestingLabDTO toCreate = new TestingLabDTO(); toCreate.setTestingLabCode(atlInfo.getAtlCode()); toCreate.setAccredidationNumber(atlInfo.getAccredidationNumber()); if(StringUtils.isEmpty(atlInfo.getName())) { throw new InvalidArgumentsException("A name is required for a testing lab"); } toCreate.setName(atlInfo.getName()); toCreate.setWebsite(atlInfo.getWebsite()); if(atlInfo.getAddress() == null) { throw new InvalidArgumentsException("An address is required for a new testing lab"); } AddressDTO address = new AddressDTO(); address.setId(atlInfo.getAddress().getAddressId()); address.setStreetLineOne(atlInfo.getAddress().getLine1()); address.setStreetLineTwo(atlInfo.getAddress().getLine2()); address.setCity(atlInfo.getAddress().getCity()); 
address.setState(atlInfo.getAddress().getState()); address.setZipcode(atlInfo.getAddress().getZipcode()); address.setCountry(atlInfo.getAddress().getCountry()); toCreate.setAddress(address); toCreate = atlManager.create(toCreate); return new TestingLab(toCreate); } @RequestMapping(value="/update", method= RequestMethod.POST, consumes= MediaType.APPLICATION_JSON_VALUE, produces="application/json; charset=utf-8") public TestingLab update(@RequestBody TestingLab atlInfo) throws InvalidArgumentsException, EntityRetrievalException, JsonProcessingException, EntityCreationException { TestingLabDTO toUpdate = new TestingLabDTO(); toUpdate.setId(atlInfo.getId()); toUpdate.setTestingLabCode(atlInfo.getAtlCode()); toUpdate.setAccredidationNumber(atlInfo.getAccredidationNumber()); if(StringUtils.isEmpty(atlInfo.getName())) { throw new InvalidArgumentsException("A name is required for a testing lab"); } toUpdate.setName(atlInfo.getName()); toUpdate.setWebsite(atlInfo.getWebsite()); if(atlInfo.getAddress() == null) { throw new InvalidArgumentsException("An address is required to update the testing lab"); } AddressDTO address = new AddressDTO(); address.setId(atlInfo.getAddress().getAddressId()); address.setStreetLineOne(atlInfo.getAddress().getLine1()); address.setStreetLineTwo(atlInfo.getAddress().getLine2()); address.setCity(atlInfo.getAddress().getCity()); address.setState(atlInfo.getAddress().getState()); address.setZipcode(atlInfo.getAddress().getZipcode()); address.setCountry(atlInfo.getAddress().getCountry()); toUpdate.setAddress(address); TestingLabDTO result = atlManager.update(toUpdate); return new TestingLab(result); } @RequestMapping(value="/{atlId}/delete", method= RequestMethod.POST, produces="application/json; charset=utf-8") public String deleteAtl(@PathVariable("atlId") Long atlId) throws JsonProcessingException, EntityCreationException, EntityRetrievalException { TestingLabDTO toDelete = atlManager.getById(atlId); atlManager.delete(toDelete); return 
"{\"deletedAtl\" : true }"; } @RequestMapping(value="/add_user", method= RequestMethod.POST, consumes= MediaType.APPLICATION_JSON_VALUE, produces="application/json; charset=utf-8") public String addUserToAtl(@RequestBody UpdateUserAndAtlRequest updateRequest) throws UserRetrievalException, EntityRetrievalException, InvalidArgumentsException { if(updateRequest.getAtlId() == null || updateRequest.getUserId() == null || updateRequest.getUserId() <= 0 || updateRequest.getAuthority() == null) { throw new InvalidArgumentsException("ATL ID, User ID (greater than 0), and Authority are required."); } UserDTO user = userManager.getById(updateRequest.getUserId()); TestingLabDTO atl = atlManager.getById(updateRequest.getAtlId()); if(user == null || atl == null) { throw new InvalidArgumentsException("Could not find either ATL or User specified"); } Permission permission = ChplPermission.toPermission(updateRequest.getAuthority()); atlManager.addPermission(atl, updateRequest.getUserId(), permission); return "{\"userAdded\" : true }"; } @RequestMapping(value="{atlId}/remove_user/{userId}", method= RequestMethod.POST, consumes= MediaType.APPLICATION_JSON_VALUE, produces="application/json; charset=utf-8") public String deleteUserFromAtl(@PathVariable Long atlId, @PathVariable Long userId) throws UserRetrievalException, EntityRetrievalException, InvalidArgumentsException{ UserDTO user = userManager.getById(userId); TestingLabDTO atl = atlManager.getById(atlId); if(user == null || atl == null) { throw new InvalidArgumentsException("Could not find either ATL or User specified"); } //delete all permissions on that atl atlManager.deleteAllPermissionsOnAtl(atl, new PrincipalSid(user.getSubjectName())); return "{\"userDeleted\" : true }"; } @RequestMapping(value="/{atlId}/users", method=RequestMethod.GET, produces="application/json; charset=utf-8") public @ResponseBody PermittedUserResults getUsers(@PathVariable("atlId") Long atlId) throws InvalidArgumentsException, 
EntityRetrievalException { TestingLabDTO atl = atlManager.getById(atlId); if(atl == null) { throw new InvalidArgumentsException("Could not find the ATL specified."); } List<PermittedUser> atlUsers = new ArrayList<PermittedUser>(); List<UserDTO> users = atlManager.getAllUsersOnAtl(atl); for(UserDTO user : users) { //only show users that have ROLE_ATL_* Set<UserPermissionDTO> systemPermissions = userManager.getGrantedPermissionsForUser(user); boolean hasAtlPermission = false; for(UserPermissionDTO systemPermission : systemPermissions) { if(systemPermission.getAuthority().startsWith("ROLE_ATL_")) { hasAtlPermission = true; } } if(hasAtlPermission) { List<String> roleNames = new ArrayList<String>(); for(UserPermissionDTO role : systemPermissions) { roleNames.add(role.getAuthority()); } List<Permission> permissions = atlManager.getPermissionsForUser(atl, new PrincipalSid(user.getSubjectName())); List<String> atlPerm = new ArrayList<String>(permissions.size()); for(Permission permission : permissions) { ChplPermission perm = ChplPermission.fromPermission(permission); if(perm != null) { atlPerm.add(perm.toString()); } } PermittedUser userInfo = new PermittedUser(); userInfo.setUser(new User(user)); userInfo.setPermissions(atlPerm); userInfo.setRoles(roleNames); atlUsers.add(userInfo); } } PermittedUserResults results = new PermittedUserResults(); results.setUsers(atlUsers); return results; } }
development: OCD-485: fix error in controller
chpl/chpl-service/src/main/java/gov/healthit/chpl/web/controller/TestingLabController.java
development: OCD-485: fix error in controller
<ide><path>hpl/chpl-service/src/main/java/gov/healthit/chpl/web/controller/TestingLabController.java <ide> <ide> @RequestMapping(value="/{atlId}/delete", method= RequestMethod.POST, <ide> produces="application/json; charset=utf-8") <del> public String deleteAtl(@PathVariable("atlId") Long atlId) throws JsonProcessingException, EntityCreationException, EntityRetrievalException { <add> public String deleteAtl(@PathVariable("atlId") Long atlId) <add> throws JsonProcessingException, EntityCreationException, EntityRetrievalException, UserRetrievalException { <ide> <ide> TestingLabDTO toDelete = atlManager.getById(atlId); <ide> atlManager.delete(toDelete);
Java
apache-2.0
3ccabd8a8080c11adfb611601b80c1a035126f41
0
QuickBlox/q-municate-android
package com.quickblox.q_municate.ui.base; import android.app.Activity; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.Bundle; import android.support.v4.content.LocalBroadcastManager; import android.util.Log; import com.facebook.Session; import com.quickblox.module.auth.model.QBProvider; import com.quickblox.q_municate.R; import com.quickblox.q_municate.core.command.Command; import com.quickblox.q_municate.model.AppSession; import com.quickblox.q_municate.model.LoginType; import com.quickblox.q_municate.qb.commands.QBLoginRestCommand; import com.quickblox.q_municate.qb.commands.QBLoginRestWithSocialCommand; import com.quickblox.q_municate.service.QBServiceConsts; import com.quickblox.q_municate.ui.splash.SplashActivity; import com.quickblox.q_municate.utils.ErrorUtils; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; //This class uses to delegate common functionality from different types of activity(Activity, FragmentActivity) public class ActivityDelegator extends BaseActivityDelegator{ private BaseBroadcastReceiver broadcastReceiver; private GlobalBroadcastReceiver globalBroadcastReceiver; private Map<String, Set<Command>> broadcastCommandMap = new HashMap<String, Set<Command>>(); private GlobalActionsListener actionsListener; public ActivityDelegator(Context context, GlobalActionsListener actionsListener) { super(context); this.actionsListener = actionsListener; } public void forceRelogin(){ ErrorUtils.showError(getContext(), getContext().getString(R.string.dlg_force_relogin_on_token_required)); SplashActivity.start(getContext()); ((Activity)getContext()).finish(); } public void refreshSession() { if (LoginType.EMAIL.equals(AppSession.getSession().getLoginType())) { QBLoginRestCommand.start(getContext(), AppSession.getSession().getUser()); } else { QBLoginRestWithSocialCommand.start(getContext(), 
QBProvider.FACEBOOK, Session.getActiveSession().getAccessToken(), null); } } public void onCreate() { broadcastReceiver = new BaseBroadcastReceiver(); globalBroadcastReceiver = new GlobalBroadcastReceiver(); } public void addAction(String action, Command command) { Set<Command> commandSet = broadcastCommandMap.get(action); if(commandSet == null){ commandSet = new HashSet<Command>(); broadcastCommandMap.put(action, commandSet); } commandSet.add(command); } public boolean hasAction(String action) { return broadcastCommandMap.containsKey(action); } public void removeAction(String action) { broadcastCommandMap.remove(action); } public void updateBroadcastActionList() { LocalBroadcastManager.getInstance(getContext()).unregisterReceiver(broadcastReceiver); IntentFilter intentFilter = new IntentFilter(); for (String commandName : broadcastCommandMap.keySet()) { intentFilter.addAction(commandName); } LocalBroadcastManager.getInstance(getContext()).registerReceiver(broadcastReceiver, intentFilter); } public void onPause() { unregisterBroadcastReceiver(); } public void onResume() { registerGlobalReceiver(); updateBroadcastActionList(); } private void registerGlobalReceiver(){ IntentFilter globalActionsIntentFilter = new IntentFilter(); globalActionsIntentFilter.addAction(QBServiceConsts.GOT_CHAT_MESSAGE); globalActionsIntentFilter.addAction(QBServiceConsts.FORCE_RELOGIN); globalActionsIntentFilter.addAction(QBServiceConsts.REFRESH_SESSION); LocalBroadcastManager.getInstance(getContext()).registerReceiver(globalBroadcastReceiver, globalActionsIntentFilter); } private void unregisterBroadcastReceiver() { LocalBroadcastManager.getInstance(getContext()).unregisterReceiver(globalBroadcastReceiver); LocalBroadcastManager.getInstance(getContext()).unregisterReceiver(broadcastReceiver); } private class BaseBroadcastReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { String action = intent.getAction(); if (intent != null && (action) 
!= null) { Log.d("STEPS", "executing " + action); Set<Command> commandSet = broadcastCommandMap.get(action); if(commandSet != null && !commandSet.isEmpty()) { for (Command command : commandSet) { try { command.execute(intent.getExtras()); } catch (Exception e) { ErrorUtils.logError(e); } } } } } } private class GlobalBroadcastReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { Bundle extras = intent.getExtras(); if (extras != null && QBServiceConsts.GOT_CHAT_MESSAGE.equals(intent.getAction())) { if (actionsListener != null){ actionsListener.onReceiveChatMessageAction(intent.getExtras()); } } else if (QBServiceConsts.FORCE_RELOGIN.equals(intent.getAction())) { if (actionsListener != null){ actionsListener.onReceiveForceReloginAction(intent.getExtras()); } }else if (QBServiceConsts.REFRESH_SESSION.equals(intent.getAction())){ if (actionsListener != null){ actionsListener.onReceiveRefreshSessionAction(intent.getExtras()); } } } } public interface GlobalActionsListener{ public void onReceiveChatMessageAction(Bundle extras); public void onReceiveForceReloginAction(Bundle extras); public void onReceiveRefreshSessionAction(Bundle extras); } }
Q-municate/src/main/java/com/quickblox/q_municate/ui/base/ActivityDelegator.java
package com.quickblox.q_municate.ui.base; import android.app.Activity; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.os.Bundle; import android.support.v4.content.LocalBroadcastManager; import android.util.Log; import com.facebook.Session; import com.quickblox.module.auth.model.QBProvider; import com.quickblox.q_municate.R; import com.quickblox.q_municate.core.command.Command; import com.quickblox.q_municate.model.AppSession; import com.quickblox.q_municate.model.LoginType; import com.quickblox.q_municate.qb.commands.QBLoginRestCommand; import com.quickblox.q_municate.qb.commands.QBLoginRestWithSocialCommand; import com.quickblox.q_municate.service.QBServiceConsts; import com.quickblox.q_municate.ui.splash.SplashActivity; import com.quickblox.q_municate.utils.ErrorUtils; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; //This class uses to delegate common functionality from different types of activity(Activity, FragmentActivity) public class ActivityDelegator extends BaseActivityDelegator{ private BaseBroadcastReceiver broadcastReceiver; private GlobalBroadcastReceiver globalBroadcastReceiver; private Map<String, List<Command>> broadcastCommandMap = new HashMap<String, List<Command>>(); private GlobalActionsListener actionsListener; public ActivityDelegator(Context context, GlobalActionsListener actionsListener) { super(context); this.actionsListener = actionsListener; } public void forceRelogin(){ ErrorUtils.showError(getContext(), getContext().getString(R.string.dlg_force_relogin_on_token_required)); SplashActivity.start(getContext()); ((Activity)getContext()).finish(); } public void refreshSession() { if (LoginType.EMAIL.equals(AppSession.getSession().getLoginType())) { QBLoginRestCommand.start(getContext(), AppSession.getSession().getUser()); } else { QBLoginRestWithSocialCommand.start(getContext(), 
QBProvider.FACEBOOK, Session.getActiveSession().getAccessToken(), null); } } public void onCreate() { broadcastReceiver = new BaseBroadcastReceiver(); globalBroadcastReceiver = new GlobalBroadcastReceiver(); } public void addAction(String action, Command command) { List<Command> commandList = broadcastCommandMap.get(action); if(commandList == null){ commandList = new LinkedList<Command>(); broadcastCommandMap.put(action, commandList); } commandList.add(command); } public boolean hasAction(String action) { return broadcastCommandMap.containsKey(action); } public void removeAction(String action) { broadcastCommandMap.remove(action); } public void updateBroadcastActionList() { LocalBroadcastManager.getInstance(getContext()).unregisterReceiver(broadcastReceiver); IntentFilter intentFilter = new IntentFilter(); for (String commandName : broadcastCommandMap.keySet()) { intentFilter.addAction(commandName); } LocalBroadcastManager.getInstance(getContext()).registerReceiver(broadcastReceiver, intentFilter); } public void onPause() { unregisterBroadcastReceiver(); } public void onResume() { registerGlobalReceiver(); updateBroadcastActionList(); } private void registerGlobalReceiver(){ IntentFilter globalActionsIntentFilter = new IntentFilter(); globalActionsIntentFilter.addAction(QBServiceConsts.GOT_CHAT_MESSAGE); globalActionsIntentFilter.addAction(QBServiceConsts.FORCE_RELOGIN); globalActionsIntentFilter.addAction(QBServiceConsts.REFRESH_SESSION); LocalBroadcastManager.getInstance(getContext()).registerReceiver(globalBroadcastReceiver, globalActionsIntentFilter); } private void unregisterBroadcastReceiver() { LocalBroadcastManager.getInstance(getContext()).unregisterReceiver(globalBroadcastReceiver); LocalBroadcastManager.getInstance(getContext()).unregisterReceiver(broadcastReceiver); } private class BaseBroadcastReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { String action = intent.getAction(); if (intent != null && 
(action) != null) { Log.d("STEPS", "executing " + action); List<Command> commandList = broadcastCommandMap.get(action); if(commandList != null && !commandList.isEmpty()) { for (Command command : commandList) { try { command.execute(intent.getExtras()); } catch (Exception e) { ErrorUtils.logError(e); } } } } } } private class GlobalBroadcastReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { Bundle extras = intent.getExtras(); if (extras != null && QBServiceConsts.GOT_CHAT_MESSAGE.equals(intent.getAction())) { if (actionsListener != null){ actionsListener.onReceiveChatMessageAction(intent.getExtras()); } } else if (QBServiceConsts.FORCE_RELOGIN.equals(intent.getAction())) { if (actionsListener != null){ actionsListener.onReceiveForceReloginAction(intent.getExtras()); } }else if (QBServiceConsts.REFRESH_SESSION.equals(intent.getAction())){ if (actionsListener != null){ actionsListener.onReceiveRefreshSessionAction(intent.getExtras()); } } } } public interface GlobalActionsListener{ public void onReceiveChatMessageAction(Bundle extras); public void onReceiveForceReloginAction(Bundle extras); public void onReceiveRefreshSessionAction(Bundle extras); } }
updated adding commands logic
Q-municate/src/main/java/com/quickblox/q_municate/ui/base/ActivityDelegator.java
updated adding commands logic
<ide><path>-municate/src/main/java/com/quickblox/q_municate/ui/base/ActivityDelegator.java <ide> import com.quickblox.q_municate.utils.ErrorUtils; <ide> <ide> import java.util.HashMap; <del>import java.util.LinkedList; <del>import java.util.List; <add>import java.util.HashSet; <ide> import java.util.Map; <add>import java.util.Set; <ide> <ide> //This class uses to delegate common functionality from different types of activity(Activity, FragmentActivity) <ide> public class ActivityDelegator extends BaseActivityDelegator{ <ide> <ide> private BaseBroadcastReceiver broadcastReceiver; <ide> private GlobalBroadcastReceiver globalBroadcastReceiver; <del> private Map<String, List<Command>> broadcastCommandMap = new HashMap<String, List<Command>>(); <add> private Map<String, Set<Command>> broadcastCommandMap = new HashMap<String, Set<Command>>(); <ide> private GlobalActionsListener actionsListener; <ide> <ide> public ActivityDelegator(Context context, GlobalActionsListener actionsListener) { <ide> } <ide> <ide> public void addAction(String action, Command command) { <del> List<Command> commandList = broadcastCommandMap.get(action); <del> if(commandList == null){ <del> commandList = new LinkedList<Command>(); <del> broadcastCommandMap.put(action, commandList); <add> Set<Command> commandSet = broadcastCommandMap.get(action); <add> if(commandSet == null){ <add> commandSet = new HashSet<Command>(); <add> broadcastCommandMap.put(action, commandSet); <ide> } <del> commandList.add(command); <add> commandSet.add(command); <ide> } <ide> <ide> public boolean hasAction(String action) { <ide> String action = intent.getAction(); <ide> if (intent != null && (action) != null) { <ide> Log.d("STEPS", "executing " + action); <del> List<Command> commandList = broadcastCommandMap.get(action); <del> if(commandList != null && !commandList.isEmpty()) { <del> for (Command command : commandList) { <add> Set<Command> commandSet = broadcastCommandMap.get(action); <add> if(commandSet != null && 
!commandSet.isEmpty()) { <add> for (Command command : commandSet) { <ide> try { <ide> command.execute(intent.getExtras()); <ide> } catch (Exception e) {
JavaScript
mit
8de8e92e4e8f379615d4df80dd00785dc4249b5d
0
node4good/formage,kzradio/formage,kzradio/formage,node4good/formage
var Class = require('sji'), forms = require('../forms'), MongooseAdminAudit = require('./mongoose_admin_audit.js').MongooseAdminAudit, async = require('async'), _ = require('lodash'); var AdminModel = module.exports = Class.extend({ init: function (options) { options = options || {}; this.name = options.name || null; this.model = options.model || null; this.form_type = options.form_type || forms.AdminForm; this.list = []; this.sortable = null; this.actions = []; this.is_single = false; }, update: function (req, user, document_id, params, callback) { var self = this; var model = self.model; var form_type = self.form_type; var form = null; async.waterfall([ function (cbk) { model.findById(document_id, function (err, document) { cbk(err, document); }); }, function (document, cbk) { form = new form_type(req, {instance: document, data: params}, model); form.is_valid(function (err, valid) { cbk(err || valid); }); }, function (cbk) { form.save(cbk); }, function (document, cbk) { MongooseAdminAudit.logActivity(user, self.name, document._id, 'edit', null, function (err, auditLog) { cbk(null, document); }); }], callback); }, count: function (callback) { if (this.is_single) this.model.count({}, callback); else callback(null, 1); } });
models/AdminModel.js
var Class = require('sji'), forms = require('../forms'), jest = require('jest'), MongooseAdminAudit = require('./mongoose_admin_audit.js').MongooseAdminAudit, async = require('async'), _ = require('lodash'); var AdminModel = module.exports = Class.extend({ init: function (options) { options = options || {}; this.name = options.name || null; this.model = options.model || null; this.form_type = options.form_type || forms.AdminForm; this.resource = options.resource || jest.MongooseResource(this.model); this.list = []; this.sortable = null; this.actions = []; this.is_single = false; }, update: function (req, user, document_id, params, callback) { var self = this; var model = self.model; var form_type = self.form_type; var form = null; async.waterfall([ function (cbk) { model.findById(document_id, function (err, document) { cbk(err, document); }); }, function (document, cbk) { form = new form_type(req, {instance: document, data: params}, model); form.is_valid(function (err, valid) { cbk(err || valid); }); }, function (cbk) { form.save(cbk); }, function (document, cbk) { MongooseAdminAudit.logActivity(user, self.name, document._id, 'edit', null, function (err, auditLog) { cbk(null, document); }); }], callback); }, count: function (callback) { if (this.is_single) this.model.count({}, callback); else callback(null, 1); } });
Less jest
models/AdminModel.js
Less jest
<ide><path>odels/AdminModel.js <ide> var Class = require('sji'), <ide> forms = require('../forms'), <del> jest = require('jest'), <ide> MongooseAdminAudit = require('./mongoose_admin_audit.js').MongooseAdminAudit, <ide> async = require('async'), <ide> _ = require('lodash'); <ide> this.name = options.name || null; <ide> this.model = options.model || null; <ide> this.form_type = options.form_type || forms.AdminForm; <del> this.resource = options.resource || jest.MongooseResource(this.model); <ide> this.list = []; <ide> this.sortable = null; <ide> this.actions = [];
JavaScript
mit
9cb6fd0cedf89f941d0344effd9574399aa3e8ca
0
Rabrennie/mp-select-mini-frontend,Rabrennie/mp-select-mini-frontend
// The Vue build version to load with the `import` command // (runtime-only or standalone) has been set in webpack.base.conf with an alias. import Vue from 'vue'; import VueWebsocket from 'vue-native-websocket'; import VueResource from 'vue-resource'; import App from './App'; import router from './router'; import store from './store'; Vue.config.productionTip = false; Vue.use(VueWebsocket, 'ws://192.168.1.194:81', { store }); store.$socket = Vue.prototype.$socket; Vue.use(VueResource); /* eslint-disable no-new */ new Vue({ el: '#app', router, store, template: '<App/>', components: { App }, });
src/main.js
// The Vue build version to load with the `import` command // (runtime-only or standalone) has been set in webpack.base.conf with an alias. import Vue from 'vue'; import VueWebsocket from 'vue-native-websocket'; import App from './App'; import router from './router'; import store from './store'; Vue.config.productionTip = false; Vue.use(VueWebsocket, 'ws://192.168.1.194:81', { store }); store.$socket = Vue.prototype.$socket; /* eslint-disable no-new */ new Vue({ el: '#app', router, store, template: '<App/>', components: { App }, });
add vue resource to vue object
src/main.js
add vue resource to vue object
<ide><path>rc/main.js <ide> // (runtime-only or standalone) has been set in webpack.base.conf with an alias. <ide> import Vue from 'vue'; <ide> import VueWebsocket from 'vue-native-websocket'; <add>import VueResource from 'vue-resource'; <ide> import App from './App'; <ide> import router from './router'; <ide> import store from './store'; <ide> Vue.use(VueWebsocket, 'ws://192.168.1.194:81', { store }); <ide> store.$socket = Vue.prototype.$socket; <ide> <add>Vue.use(VueResource); <add> <ide> /* eslint-disable no-new */ <ide> new Vue({ <ide> el: '#app',
Java
mit
9f76ebb54bb4e9c5549818968beb2e5f772bd844
0
elBukkit/MagicPlugin,elBukkit/MagicLib,elBukkit/MagicPlugin,elBukkit/MagicPlugin
package com.elmakers.mine.bukkit.action; import com.elmakers.mine.bukkit.api.action.CastContext; import com.elmakers.mine.bukkit.api.spell.SpellResult; import com.elmakers.mine.bukkit.spell.BaseSpell; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.entity.Entity; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; public abstract class CompoundEntityAction extends CompoundAction { protected boolean targetSelf; private List<WeakReference<Entity>> entities = new ArrayList<WeakReference<Entity>>(); private int currentEntity = 0; public abstract void prepareEntities(CastContext context, ConfigurationSection parameters, List<WeakReference<Entity>> entities); @Override public void prepare(CastContext context, ConfigurationSection parameters) { super.prepare(context, parameters); targetSelf = parameters.getBoolean("target_self", false); entities.clear(); prepareEntities(context, parameters, entities); } @Override public void reset(CastContext context) { super.reset(context); currentEntity = 0; } @Override public SpellResult perform(CastContext context) { SpellResult result = SpellResult.NO_TARGET; while (currentEntity < entities.size()) { Entity entity = entities.get(currentEntity).get(); if (entity == null) { currentEntity++; skippedActions(context); continue; } actionContext.setTargetEntity(entity); actionContext.setTargetLocation(entity.getLocation()); SpellResult entityResult = performActions(actionContext); result = result.min(entityResult); if (entityResult == SpellResult.PENDING) { break; } currentEntity++; if (currentEntity < entities.size()) { super.reset(context); } } return result; } @Override public void getParameterNames(Collection<String> parameters) { super.getParameterNames(parameters); parameters.add("target_self"); } @Override public void getParameterOptions(Collection<String> examples, String parameterKey) { if 
(parameterKey.equals("target_self")) { examples.addAll(Arrays.asList((BaseSpell.EXAMPLE_BOOLEANS))); } } @Override public Object clone() { CompoundEntityAction action = (CompoundEntityAction)super.clone(); if (action != null) { action.entities = new ArrayList<WeakReference<Entity>>(this.entities); } return action; } }
src/main/java/com/elmakers/mine/bukkit/action/CompoundEntityAction.java
package com.elmakers.mine.bukkit.action; import com.elmakers.mine.bukkit.api.action.CastContext; import com.elmakers.mine.bukkit.api.spell.SpellResult; import com.elmakers.mine.bukkit.spell.BaseSpell; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.entity.Entity; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; public abstract class CompoundEntityAction extends CompoundAction { protected boolean targetSelf; private List<WeakReference<Entity>> entities = new ArrayList<WeakReference<Entity>>(); private int currentEntity = 0; public abstract void prepareEntities(CastContext context, ConfigurationSection parameters, List<WeakReference<Entity>> entities); @Override public void prepare(CastContext context, ConfigurationSection parameters) { super.prepare(context, parameters); targetSelf = parameters.getBoolean("target_self", false); entities.clear(); prepareEntities(context, parameters, entities); } @Override public void reset(CastContext context) { super.reset(context); currentEntity = 0; } @Override public SpellResult perform(CastContext context) { SpellResult result = SpellResult.NO_TARGET; while (currentEntity < entities.size()) { Entity entity = entities.get(currentEntity).get(); if (entity == null) { skippedActions(context); continue; } actionContext.setTargetEntity(entity); actionContext.setTargetLocation(entity.getLocation()); SpellResult entityResult = performActions(actionContext); result = result.min(entityResult); if (entityResult == SpellResult.PENDING) { break; } currentEntity++; if (currentEntity < entities.size()) { super.reset(context); } } return result; } @Override public void getParameterNames(Collection<String> parameters) { super.getParameterNames(parameters); parameters.add("target_self"); } @Override public void getParameterOptions(Collection<String> examples, String parameterKey) { if (parameterKey.equals("target_self")) { 
examples.addAll(Arrays.asList((BaseSpell.EXAMPLE_BOOLEANS))); } } @Override public Object clone() { CompoundEntityAction action = (CompoundEntityAction)super.clone(); if (action != null) { action.entities = new ArrayList<WeakReference<Entity>>(this.entities); } return action; } }
Fix potential hang on invalid entities in target list
src/main/java/com/elmakers/mine/bukkit/action/CompoundEntityAction.java
Fix potential hang on invalid entities in target list
<ide><path>rc/main/java/com/elmakers/mine/bukkit/action/CompoundEntityAction.java <ide> Entity entity = entities.get(currentEntity).get(); <ide> if (entity == null) <ide> { <add> currentEntity++; <ide> skippedActions(context); <ide> continue; <ide> }
Java
mit
3b257031189fec8b69eb4f25120713c5b40130c5
0
frostwire/frostwire-jlibtorrent,gubatron/frostwire-jlibtorrent,gubatron/frostwire-jlibtorrent,aldenml/frostwire-jlibtorrent,aldenml/frostwire-jlibtorrent,tchoulihan/frostwire-jlibtorrent,tchoulihan/frostwire-jlibtorrent,gubatron/frostwire-jlibtorrent,frostwire/frostwire-jlibtorrent,frostwire/frostwire-jlibtorrent,tchoulihan/frostwire-jlibtorrent,aldenml/frostwire-jlibtorrent
package com.frostwire.jlibtorrent; import com.frostwire.jlibtorrent.swig.*; import java.io.File; import java.util.ArrayList; import java.util.List; /** * This class represents the information stored in a .torrent file * * @author gubatron * @author aldenml */ public final class TorrentInfo { private final torrent_info ti; public TorrentInfo(torrent_info ti) { this.ti = ti; } public TorrentInfo(byte[] bencodedBytes) { lazy_entry lentry = new lazy_entry(); error_code ec = new error_code(); lazy_entry.bdecode(Vectors.bytes2char_vector(bencodedBytes), lentry, ec); if (ec.value() != 0) { this.ti = null; throw new IllegalArgumentException(ec.message()); } else { this.ti = new torrent_info(lentry); } } /** * Load the torrent file and decode it inside the constructor, for convenience. * <p/> * This might not be the most suitable for applications that * want to be able to report detailed errors on what might go wrong. * * @param torrent */ public TorrentInfo(File torrent) { this(new torrent_info(torrent.getAbsolutePath())); } public torrent_info getSwig() { return this.ti; } /** * The {@link com.frostwire.jlibtorrent.FileStorage} object contains the information on * how to map the pieces to files. * <p/> * It is separated from the torrent_info object because when creating torrents * a storage object needs to be created without having a torrent file. When renaming files * in a storage, the storage needs to make its own copy of the file_storage in order * to make its mapping differ from the one in the torrent file. * * @return */ public FileStorage getFiles() { return new FileStorage(ti.files()); } /** * returns the original (unmodified) file storage for this torrent. This * is used by the web server connection, which needs to request files with the original * names. Filename may be chaged using ``torrent_info::rename_file()``. * * @return */ public FileStorage getOrigFiles() { return new FileStorage(ti.orig_files()); } /** * Adds a tracker to the announce-list. 
* * @param url */ public void addTracker(String url) { ti.add_tracker(url); } /** * Adds a tracker to the announce-list. The ``tier`` determines the order in * which the trackers are to be tried. * * @param url * @param tier */ public void addTracker(String url, int tier) { ti.add_tracker(url, tier); } /** * will return a sorted vector of ``announce_entry``. * <p/> * Each announce entry contains a string, which is the tracker url, and a tier index. The * tier index is the high-level priority. No matter which trackers that works or not, the * ones with lower tier will always be tried before the one with higher tier number. * * @return */ public List<AnnounceEntry> getTrackers() { announce_entry_vector v = ti.trackers(); int size = (int) v.size(); List<AnnounceEntry> l = new ArrayList<AnnounceEntry>(size); for (int i = 0; i < size; i++) { l.add(new AnnounceEntry(v.get(i))); } return l; } /** * Adds one url to the list of url seeds. Currently, the only transport protocol supported for the url * is http. * * @param url */ public void addUrlSeed(String url) { ti.add_url_seed(url); } /** * Adds one url to the list of url seeds. Currently, the only transport protocol supported for the url * is http. * <p/> * The ``extern_auth`` argument can be used for other athorization schemese than * basic HTTP authorization. If set, it will override any username and password * found in the URL itself. The string will be sent as the HTTP authorization header's * value (without specifying "Basic"). * * @param url * @param externAuth */ public void addUrlSeed(String url, String externAuth) { ti.add_url_seed(url, externAuth); } /** * Adds one url to the list of url seeds. Currently, the only transport protocol supported for the url * is http. * <p/> * he ``extern_auth`` argument can be used for other athorization schemese than * basic HTTP authorization. If set, it will override any username and password * found in the URL itself. 
The string will be sent as the HTTP authorization header's * value (without specifying "Basic"). * <p/> * The ``extra_headers`` argument defaults to an empty list, but can be used to * insert custom HTTP headers in the requests to a specific web seed. * * @param url * @param externAuth * @param extraHeaders */ public void addUrlSeed(String url, String externAuth, List<Pair<String, String>> extraHeaders) { string_string_pair_vector v = new string_string_pair_vector(); for (int i = 0; i < extraHeaders.size(); i++) { v.add(extraHeaders.get(i).to_string_string_pair()); } ti.add_url_seed(url, externAuth, v); } /** * Adds one url to the list of http seeds. Currently, the only transport protocol supported for the url * is http. * * @param url */ public void addHttpSeed(String url) { ti.add_url_seed(url); } /** * Adds one url to the list of http seeds. Currently, the only transport protocol supported for the url * is http. * <p/> * The ``extern_auth`` argument can be used for other athorization schemese than * basic HTTP authorization. If set, it will override any username and password * found in the URL itself. The string will be sent as the HTTP authorization header's * value (without specifying "Basic"). * * @param url * @param externAuth */ public void addHttpSeed(String url, String externAuth) { ti.add_url_seed(url, externAuth); } /** * Adds one url to the list of http seeds. Currently, the only transport protocol supported for the url * is http. * <p/> * he ``extern_auth`` argument can be used for other athorization schemese than * basic HTTP authorization. If set, it will override any username and password * found in the URL itself. The string will be sent as the HTTP authorization header's * value (without specifying "Basic"). * <p/> * The ``extra_headers`` argument defaults to an empty list, but can be used to * insert custom HTTP headers in the requests to a specific web seed. 
* * @param url * @param externAuth * @param extraHeaders */ public void addHttpSeed(String url, String externAuth, List<Pair<String, String>> extraHeaders) { string_string_pair_vector v = new string_string_pair_vector(); for (int i = 0; i < extraHeaders.size(); i++) { v.add(extraHeaders.get(i).to_string_string_pair()); } ti.add_url_seed(url, externAuth, v); } /** * returns all url seeds and http seeds in the torrent. Each entry * is a ``web_seed_entry`` and may refer to either a url seed or http seed. * * @return */ public List<WebSeedEntry> getWebSeeds() { web_seed_entry_vector v = ti.web_seeds(); int size = (int) v.size(); List<WebSeedEntry> l = new ArrayList<WebSeedEntry>(size); for (int i = 0; i < size; i++) { l.add(new WebSeedEntry(v.get(i))); } return l; } /** * The total number of bytes the torrent-file represents (all the files in it). * * @return */ public long getTotalSize() { return ti.total_size(); } /** * The number of byte for each piece. * <p/> * The difference between {@link #getPieceSize(int)} and {@link #getPieceLength()} is that * {@link #getPieceSize(int)} takes the piece index as argument and gives you the exact size * of that piece. It will always be the same as {@link #getPieceLength()} except in the case * of the last piece, which may be smaller. * * @return */ public int getPieceLength() { return ti.piece_length(); } /** * The total number of pieces. * * @return */ public int getNumPieces() { return ti.num_pieces(); } /** * returns the info-hash of the torrent. * * @return */ public Sha1Hash getInfoHash() { return new Sha1Hash(ti.info_hash()); } /** * If you need index-access to files you can use the ``num_files()`` and ``file_at()`` * to access files using indices. * * @return */ public int getNumFiles() { return ti.num_files(); } /** * If you need index-access to files you can use the {@link #getNumFiles()} * and {@link #getFileAt(int)} to access files using indices. 
* * @return */ public FileEntry getFileAt(int index) { return new FileEntry(ti.file_at(index)); } /** * This function will map a piece index, a byte offset within that piece and * a size (in bytes) into the corresponding files with offsets where that data * for that piece is supposed to be stored. * * @param piece * @param offset * @param size * @return * @see com.frostwire.jlibtorrent.FileSlice */ public ArrayList<FileSlice> mapBlock(int piece, long offset, int size) { file_slice_vector v = ti.map_block(piece, offset, size); int vSize = (int) v.size(); ArrayList<FileSlice> l = new ArrayList<FileSlice>(vSize); for (int i = 0; i < vSize; i++) { l.add(new FileSlice(v.get(i))); } return l; } /** * This function will map a range in a specific file into a range in the torrent. * The {@code offset} parameter is the offset in the file, given in bytes, where * 0 is the start of the file. * <p/> * The input range is assumed to be valid within the torrent. {@code offset + size} * is not allowed to be greater than the file size. {@code index} * must refer to a valid file, i.e. it cannot be {@code >= getNumFiles()}. * * @param file * @param offset * @param size * @return * @see com.frostwire.jlibtorrent.PeerRequest */ public PeerRequest mapFile(int file, long offset, int size) { return new PeerRequest(ti.map_file(file, offset, size)); } /** * Returns the SSL root certificate for the torrent, if it is an SSL * torrent. Otherwise returns an empty string. The certificate is * the public certificate in x509 format. * * @return */ public String getSslCert() { return ti.ssl_cert(); } /** * Returns true if this torrent_info object has a torrent loaded. * <p/> * This is primarily used to determine if a magnet link has had its * metadata resolved yet or not. * * @return */ public boolean isValid() { return ti.is_valid(); } /** * returns true if this torrent is private. i.e., it should not be * distributed on the trackerless network (the kademlia DHT). 
* * @return */ public boolean isPrivate() { return ti.priv(); } /** * returns true if this is an i2p torrent. This is determined by whether * or not it has a tracker whose URL domain name ends with ".i2p". i2p * torrents disable the DHT and local peer discovery as well as talking * to peers over anything other than the i2p network. * * @return */ public boolean isI2P() { return ti.is_i2p(); } public int getPieceSize(int index) { return ti.piece_size(index); } /** * takes a piece-index and returns the 20-bytes sha1-hash for that * piece and ``info_hash()`` returns the 20-bytes sha1-hash for the info-section of the * torrent file. * * @param index * @return */ public Sha1Hash getHashForPiece(int index) { return new Sha1Hash(ti.hash_for_piece(index)); } /** * returns the name of the torrent. * <p/> * the name is an UTF-8 encoded strings. * * @return */ public String getName() { return ti.name(); } /** * returns the creation date of * the torrent as time_t (`posix time`_). If there's no time stamp in the torrent file, * a value of zero is returned. * * @return */ public int getCreationDate() { return ti.get_creation_date(); } /** * returns the creator string in the torrent. If there is no creator string * it will return an empty string. * * @return */ public String getCreator() { return ti.creator(); } /** * returns the comment associated with the torrent. If there's no comment, * it will return an empty string. * <p/> * the comment is an UTF-8 encoded strings. * * @return */ public String getComment() { return ti.comment(); } /** * Generates a magnet URI from the specified torrent. If the torrent * is invalid, null is returned. * <p/> * For more information about magnet links, see magnet-links_. * * @return */ public String makeMagnetUri() { return ti.is_valid() ? 
libtorrent.make_magnet_uri(ti) : null; } public Entry toEntry() { return new Entry(new create_torrent(ti).generate()); } public byte[] bencode() { return toEntry().bencode(); } public static TorrentInfo bdecode(byte[] data) { lazy_entry e = new lazy_entry(); error_code ec = new error_code(); int ret = lazy_entry.bdecode(Vectors.bytes2char_vector(data), e, ec); if (ret == 0) { return new TorrentInfo(new torrent_info(e)); } else { throw new IllegalArgumentException("Can't decode data"); } } }
src/com/frostwire/jlibtorrent/TorrentInfo.java
package com.frostwire.jlibtorrent;

import com.frostwire.jlibtorrent.swig.*;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

/**
 * This class represents the information stored in a .torrent file.
 * <p/>
 * It is a thin wrapper around the native (SWIG) {@code torrent_info} object;
 * every method delegates to the wrapped instance.
 *
 * @author gubatron
 * @author aldenml
 */
public final class TorrentInfo {

    /** Wrapped native torrent_info instance; never null once constructed. */
    private final torrent_info ti;

    public TorrentInfo(torrent_info ti) {
        this.ti = ti;
    }

    /**
     * Decodes the given bencoded bytes into a torrent_info.
     *
     * @param bencodedBytes raw bencoded .torrent data
     * @throws IllegalArgumentException if the data cannot be bdecoded
     */
    public TorrentInfo(byte[] bencodedBytes) {
        lazy_entry lentry = new lazy_entry();
        error_code ec = new error_code();

        lazy_entry.bdecode(Vectors.bytes2char_vector(bencodedBytes), lentry, ec);

        if (ec.value() != 0) {
            throw new IllegalArgumentException(ec.message());
        } else {
            this.ti = new torrent_info(lentry);
        }
    }

    /**
     * Load the torrent file and decode it inside the constructor, for convenience.
     * <p/>
     * This might not be the most suitable for applications that
     * want to be able to report detailed errors on what might go wrong.
     *
     * @param torrent the .torrent file to load
     */
    public TorrentInfo(File torrent) {
        this(new torrent_info(torrent.getAbsolutePath()));
    }

    /**
     * @return the underlying native (SWIG) object.
     */
    public torrent_info getSwig() {
        return this.ti;
    }

    /**
     * The {@link com.frostwire.jlibtorrent.FileStorage} object contains the information on
     * how to map the pieces to files.
     * <p/>
     * It is separated from the torrent_info object because when creating torrents
     * a storage object needs to be created without having a torrent file. When renaming files
     * in a storage, the storage needs to make its own copy of the file_storage in order
     * to make its mapping differ from the one in the torrent file.
     *
     * @return the file storage of this torrent
     */
    public FileStorage getFiles() {
        return new FileStorage(ti.files());
    }

    /**
     * Returns the original (unmodified) file storage for this torrent. This
     * is used by the web server connection, which needs to request files with the original
     * names. Filenames may have been changed using {@code torrent_info::rename_file()}.
     *
     * @return the original file storage
     */
    public FileStorage getOrigFiles() {
        return new FileStorage(ti.orig_files());
    }

    /**
     * Adds a tracker to the announce-list.
     *
     * @param url tracker url
     */
    public void addTracker(String url) {
        ti.add_tracker(url);
    }

    /**
     * Adds a tracker to the announce-list. The {@code tier} determines the order in
     * which the trackers are to be tried.
     *
     * @param url  tracker url
     * @param tier tracker tier (lower tiers are tried first)
     */
    public void addTracker(String url, int tier) {
        ti.add_tracker(url, tier);
    }

    /**
     * Returns a sorted list of {@code announce_entry} wrappers.
     * <p/>
     * Each announce entry contains a string, which is the tracker url, and a tier index. The
     * tier index is the high-level priority. No matter which trackers work or not, the
     * ones with lower tier will always be tried before the ones with higher tier number.
     *
     * @return the trackers of this torrent
     */
    public List<AnnounceEntry> getTrackers() {
        announce_entry_vector v = ti.trackers();
        int size = (int) v.size();
        List<AnnounceEntry> l = new ArrayList<AnnounceEntry>(size);

        for (int i = 0; i < size; i++) {
            l.add(new AnnounceEntry(v.get(i)));
        }

        return l;
    }

    /**
     * Adds one url to the list of url seeds. Currently, the only transport protocol
     * supported for the url is http.
     *
     * @param url url seed
     */
    public void addUrlSeed(String url) {
        ti.add_url_seed(url);
    }

    /**
     * Adds one url to the list of url seeds. Currently, the only transport protocol
     * supported for the url is http.
     * <p/>
     * The {@code externAuth} argument can be used for other authorization schemes than
     * basic HTTP authorization. If set, it will override any username and password
     * found in the URL itself. The string will be sent as the HTTP authorization header's
     * value (without specifying "Basic").
     *
     * @param url        url seed
     * @param externAuth authorization header value, may override URL credentials
     */
    public void addUrlSeed(String url, String externAuth) {
        ti.add_url_seed(url, externAuth);
    }

    /**
     * Adds one url to the list of url seeds. Currently, the only transport protocol
     * supported for the url is http.
     * <p/>
     * The {@code externAuth} argument can be used for other authorization schemes than
     * basic HTTP authorization. If set, it will override any username and password
     * found in the URL itself. The string will be sent as the HTTP authorization header's
     * value (without specifying "Basic").
     * <p/>
     * The {@code extraHeaders} argument can be used to
     * insert custom HTTP headers in the requests to a specific web seed.
     *
     * @param url          url seed
     * @param externAuth   authorization header value, may override URL credentials
     * @param extraHeaders custom HTTP headers sent to this web seed
     */
    public void addUrlSeed(String url, String externAuth, List<Pair<String, String>> extraHeaders) {
        string_string_pair_vector v = new string_string_pair_vector();

        for (int i = 0; i < extraHeaders.size(); i++) {
            v.add(extraHeaders.get(i).to_string_string_pair());
        }

        ti.add_url_seed(url, externAuth, v);
    }

    /**
     * Adds one url to the list of http seeds. Currently, the only transport protocol
     * supported for the url is http.
     *
     * @param url http seed
     */
    public void addHttpSeed(String url) {
        // FIX: previously delegated to add_url_seed, which registers a BEP 19
        // url seed instead of the BEP 17 http seed this method documents.
        ti.add_http_seed(url);
    }

    /**
     * Adds one url to the list of http seeds. Currently, the only transport protocol
     * supported for the url is http.
     * <p/>
     * The {@code externAuth} argument can be used for other authorization schemes than
     * basic HTTP authorization. If set, it will override any username and password
     * found in the URL itself. The string will be sent as the HTTP authorization header's
     * value (without specifying "Basic").
     *
     * @param url        http seed
     * @param externAuth authorization header value, may override URL credentials
     */
    public void addHttpSeed(String url, String externAuth) {
        // FIX: previously delegated to add_url_seed (wrong seed type).
        ti.add_http_seed(url, externAuth);
    }

    /**
     * Adds one url to the list of http seeds. Currently, the only transport protocol
     * supported for the url is http.
     * <p/>
     * The {@code externAuth} argument can be used for other authorization schemes than
     * basic HTTP authorization. If set, it will override any username and password
     * found in the URL itself. The string will be sent as the HTTP authorization header's
     * value (without specifying "Basic").
     * <p/>
     * The {@code extraHeaders} argument can be used to
     * insert custom HTTP headers in the requests to a specific web seed.
     *
     * @param url          http seed
     * @param externAuth   authorization header value, may override URL credentials
     * @param extraHeaders custom HTTP headers sent to this web seed
     */
    public void addHttpSeed(String url, String externAuth, List<Pair<String, String>> extraHeaders) {
        string_string_pair_vector v = new string_string_pair_vector();

        for (int i = 0; i < extraHeaders.size(); i++) {
            v.add(extraHeaders.get(i).to_string_string_pair());
        }

        // FIX: previously delegated to add_url_seed (wrong seed type).
        ti.add_http_seed(url, externAuth, v);
    }

    /**
     * Returns all url seeds and http seeds in the torrent. Each entry
     * is a {@code web_seed_entry} and may refer to either a url seed or http seed.
     *
     * @return the web seeds of this torrent
     */
    public List<WebSeedEntry> getWebSeeds() {
        web_seed_entry_vector v = ti.web_seeds();
        int size = (int) v.size();
        List<WebSeedEntry> l = new ArrayList<WebSeedEntry>(size);

        for (int i = 0; i < size; i++) {
            l.add(new WebSeedEntry(v.get(i)));
        }

        return l;
    }

    /**
     * The total number of bytes the torrent-file represents (all the files in it).
     *
     * @return total size in bytes
     */
    public long getTotalSize() {
        return ti.total_size();
    }

    /**
     * The number of bytes for each piece.
     * <p/>
     * The difference between {@link #getPieceSize(int)} and {@link #getPieceLength()} is that
     * {@link #getPieceSize(int)} takes the piece index as argument and gives you the exact size
     * of that piece. It will always be the same as {@link #getPieceLength()} except in the case
     * of the last piece, which may be smaller.
     *
     * @return the nominal piece length in bytes
     */
    public int getPieceLength() {
        return ti.piece_length();
    }

    /**
     * The total number of pieces.
     *
     * @return number of pieces
     */
    public int getNumPieces() {
        return ti.num_pieces();
    }

    /**
     * Returns the info-hash of the torrent.
     *
     * @return the 20-byte sha1 info-hash
     */
    public Sha1Hash getInfoHash() {
        return new Sha1Hash(ti.info_hash());
    }

    /**
     * If you need index-access to files you can use {@link #getNumFiles()} and
     * {@link #getFileAt(int)} to access files using indices.
     *
     * @return number of files in the torrent
     */
    public int getNumFiles() {
        return ti.num_files();
    }

    /**
     * If you need index-access to files you can use the {@link #getNumFiles()}
     * and {@link #getFileAt(int)} to access files using indices.
     *
     * @param index file index in {@code [0, getNumFiles())}
     * @return the file entry at the given index
     */
    public FileEntry getFileAt(int index) {
        return new FileEntry(ti.file_at(index));
    }

    /**
     * This function will map a piece index, a byte offset within that piece and
     * a size (in bytes) into the corresponding files with offsets where that data
     * for that piece is supposed to be stored.
     *
     * @param piece  piece index
     * @param offset byte offset within the piece
     * @param size   number of bytes to map
     * @return the file slices covering the requested range
     * @see com.frostwire.jlibtorrent.FileSlice
     */
    public ArrayList<FileSlice> mapBlock(int piece, long offset, int size) {
        file_slice_vector v = ti.map_block(piece, offset, size);
        int vSize = (int) v.size();
        ArrayList<FileSlice> l = new ArrayList<FileSlice>(vSize);

        for (int i = 0; i < vSize; i++) {
            l.add(new FileSlice(v.get(i)));
        }

        return l;
    }

    /**
     * This function will map a range in a specific file into a range in the torrent.
     * The {@code offset} parameter is the offset in the file, given in bytes, where
     * 0 is the start of the file.
     * <p/>
     * The input range is assumed to be valid within the torrent. {@code offset + size}
     * is not allowed to be greater than the file size. {@code file}
     * must refer to a valid file, i.e. it cannot be {@code >= getNumFiles()}.
     *
     * @param file   file index
     * @param offset byte offset within the file
     * @param size   number of bytes to map
     * @return the corresponding peer request
     * @see com.frostwire.jlibtorrent.PeerRequest
     */
    public PeerRequest mapFile(int file, long offset, int size) {
        return new PeerRequest(ti.map_file(file, offset, size));
    }

    /**
     * Returns the SSL root certificate for the torrent, if it is an SSL
     * torrent. Otherwise returns an empty string. The certificate is
     * the public certificate in x509 format.
     *
     * @return the SSL root certificate or an empty string
     */
    public String getSslCert() {
        return ti.ssl_cert();
    }

    /**
     * Returns true if this torrent_info object has a torrent loaded.
     * <p/>
     * This is primarily used to determine if a magnet link has had its
     * metadata resolved yet or not.
     *
     * @return true if metadata is loaded
     */
    public boolean isValid() {
        return ti.is_valid();
    }

    /**
     * Returns true if this torrent is private. i.e., it should not be
     * distributed on the trackerless network (the kademlia DHT).
     *
     * @return true if the torrent is flagged private
     */
    public boolean isPrivate() {
        return ti.priv();
    }

    /**
     * Returns true if this is an i2p torrent. This is determined by whether
     * or not it has a tracker whose URL domain name ends with ".i2p". i2p
     * torrents disable the DHT and local peer discovery as well as talking
     * to peers over anything other than the i2p network.
     *
     * @return true if this is an i2p torrent
     */
    public boolean isI2P() {
        return ti.is_i2p();
    }

    /**
     * Returns the exact size of the given piece (equal to {@link #getPieceLength()}
     * for every piece except possibly the last one).
     *
     * @param index piece index
     * @return piece size in bytes
     */
    public int getPieceSize(int index) {
        return ti.piece_size(index);
    }

    /**
     * Takes a piece-index and returns the 20-byte sha1-hash for that
     * piece; {@code info_hash()} returns the 20-byte sha1-hash for the info-section of the
     * torrent file.
     *
     * @param index piece index
     * @return sha1 hash of the piece
     */
    public Sha1Hash getHashForPiece(int index) {
        return new Sha1Hash(ti.hash_for_piece(index));
    }

    /**
     * Returns the name of the torrent.
     * <p/>
     * The name is an UTF-8 encoded string.
     *
     * @return torrent name
     */
    public String getName() {
        return ti.name();
    }

    /**
     * Returns the creation date of
     * the torrent as time_t (posix time). If there's no time stamp in the torrent file,
     * a value of zero is returned.
     *
     * @return creation date as posix time, or zero
     */
    public int getCreationDate() {
        return ti.get_creation_date();
    }

    /**
     * Returns the creator string in the torrent. If there is no creator string
     * it will return an empty string.
     *
     * @return creator string, possibly empty
     */
    public String getCreator() {
        return ti.creator();
    }

    /**
     * Returns the comment associated with the torrent. If there's no comment,
     * it will return an empty string.
     * <p/>
     * The comment is an UTF-8 encoded string.
     *
     * @return torrent comment, possibly empty
     */
    public String getComment() {
        return ti.comment();
    }

    /**
     * Generates a magnet URI from the specified torrent. If the torrent
     * is invalid, null is returned.
     * <p/>
     * For more information about magnet links, see magnet-links_.
     *
     * @return the magnet URI, or null if this torrent_info is not valid
     */
    public String makeMagnetUri() {
        return ti.is_valid() ? libtorrent.make_magnet_uri(ti) : null;
    }

    /**
     * @return this torrent serialized as a bencode {@link Entry} tree.
     */
    public Entry toEntry() {
        return new Entry(new create_torrent(ti).generate());
    }

    /**
     * @return this torrent bencoded as raw bytes.
     */
    public byte[] bencode() {
        return toEntry().bencode();
    }

    /**
     * Decodes the given bencoded bytes into a {@link TorrentInfo}.
     *
     * @param data raw bencoded .torrent data
     * @return the decoded torrent info
     * @throws IllegalArgumentException if the data cannot be bdecoded
     */
    public static TorrentInfo bdecode(byte[] data) {
        lazy_entry e = new lazy_entry();
        error_code ec = new error_code();
        int ret = lazy_entry.bdecode(Vectors.bytes2char_vector(data), e, ec);

        if (ret == 0) {
            return new TorrentInfo(new torrent_info(e));
        } else {
            throw new IllegalArgumentException("Can't decode data");
        }
    }
}
use IllegalArgumentException
src/com/frostwire/jlibtorrent/TorrentInfo.java
use IllegalArgumentException
<ide><path>rc/com/frostwire/jlibtorrent/TorrentInfo.java <ide> lazy_entry.bdecode(Vectors.bytes2char_vector(bencodedBytes), lentry, ec); <ide> <ide> if (ec.value() != 0) { <add> this.ti = null; <ide> throw new IllegalArgumentException(ec.message()); <ide> } else { <ide> this.ti = new torrent_info(lentry);
Java
apache-2.0
bee6ac82d6ed41dc7e5262c72100c7274a1e7880
0
mcherkasov/ignite,shroman/ignite,nivanov/ignite,andrey-kuznetsov/ignite,apache/ignite,SharplEr/ignite,ascherbakoff/ignite,gargvish/ignite,sk0x50/ignite,daradurvs/ignite,vladisav/ignite,shurun19851206/ignite,SharplEr/ignite,agura/incubator-ignite,samaitra/ignite,chandresh-pancholi/ignite,akuznetsov-gridgain/ignite,chandresh-pancholi/ignite,shroman/ignite,alexzaitzev/ignite,VladimirErshov/ignite,VladimirErshov/ignite,SharplEr/ignite,tkpanther/ignite,samaitra/ignite,irudyak/ignite,zzcclp/ignite,louishust/incubator-ignite,kromulan/ignite,vsisko/incubator-ignite,dream-x/ignite,thuTom/ignite,BiryukovVA/ignite,tkpanther/ignite,chandresh-pancholi/ignite,ptupitsyn/ignite,dmagda/incubator-ignite,ryanzz/ignite,zzcclp/ignite,shurun19851206/ignite,SharplEr/ignite,thuTom/ignite,BiryukovVA/ignite,alexzaitzev/ignite,ntikhonov/ignite,akuznetsov-gridgain/ignite,ryanzz/ignite,adeelmahmood/ignite,pperalta/ignite,vladisav/ignite,leveyj/ignite,kidaa/incubator-ignite,samaitra/ignite,kidaa/incubator-ignite,vladisav/ignite,dream-x/ignite,vadopolski/ignite,samaitra/ignite,pperalta/ignite,apacheignite/ignite,alexzaitzev/ignite,vldpyatkov/ignite,endian675/ignite,BiryukovVA/ignite,DoudTechData/ignite,rfqu/ignite,ptupitsyn/ignite,adeelmahmood/ignite,adeelmahmood/ignite,vladisav/ignite,apacheignite/ignite,apacheignite/ignite,vadopolski/ignite,gargvish/ignite,voipp/ignite,svladykin/ignite,SomeFire/ignite,ascherbakoff/ignite,nizhikov/ignite,louishust/incubator-ignite,vsuslov/incubator-ignite,NSAmelchev/ignite,psadusumilli/ignite,daradurvs/ignite,apache/ignite,ptupitsyn/ignite,irudyak/ignite,agura/incubator-ignite,mcherkasov/ignite,svladykin/ignite,avinogradovgg/ignite,amirakhmedov/ignite,andrey-kuznetsov/ignite,amirakhmedov/ignite,daradurvs/ignite,dlnufox/ignite,amirakhmedov/ignite,rfqu/ignite,shroman/ignite,ntikhonov/ignite,vsuslov/incubator-ignite,DoudTechData/ignite,vladisav/ignite,ashutakGG/incubator-ignite,NSAmelchev/ignite,afinka77/ignite,afinka77/ignite,shurun19851206/ignite,voipp/ignite,svl
adykin/ignite,alexzaitzev/ignite,endian675/ignite,samaitra/ignite,agura/incubator-ignite,andrey-kuznetsov/ignite,svladykin/ignite,endian675/ignite,ashutakGG/incubator-ignite,dmagda/incubator-ignite,chandresh-pancholi/ignite,vsisko/incubator-ignite,apacheignite/ignite,psadusumilli/ignite,thuTom/ignite,mcherkasov/ignite,DoudTechData/ignite,rfqu/ignite,vladisav/ignite,murador/ignite,alexzaitzev/ignite,nivanov/ignite,apache/ignite,StalkXT/ignite,amirakhmedov/ignite,pperalta/ignite,BiryukovVA/ignite,gargvish/ignite,kromulan/ignite,leveyj/ignite,DoudTechData/ignite,ilantukh/ignite,rfqu/ignite,vsuslov/incubator-ignite,WilliamDo/ignite,dlnufox/ignite,WilliamDo/ignite,svladykin/ignite,NSAmelchev/ignite,murador/ignite,nizhikov/ignite,agura/incubator-ignite,vldpyatkov/ignite,akuznetsov-gridgain/ignite,shurun19851206/ignite,agura/incubator-ignite,ryanzz/ignite,dream-x/ignite,StalkXT/ignite,kromulan/ignite,nivanov/ignite,shroman/ignite,dmagda/incubator-ignite,sylentprayer/ignite,tkpanther/ignite,zzcclp/ignite,ilantukh/ignite,pperalta/ignite,ilantukh/ignite,pperalta/ignite,sk0x50/ignite,nivanov/ignite,dlnufox/ignite,vadopolski/ignite,svladykin/ignite,arijitt/incubator-ignite,ascherbakoff/ignite,alexzaitzev/ignite,chandresh-pancholi/ignite,adeelmahmood/ignite,daradurvs/ignite,irudyak/ignite,nizhikov/ignite,nizhikov/ignite,BiryukovVA/ignite,louishust/incubator-ignite,ascherbakoff/ignite,a1vanov/ignite,ilantukh/ignite,chandresh-pancholi/ignite,SharplEr/ignite,VladimirErshov/ignite,endian675/ignite,agoncharuk/ignite,StalkXT/ignite,NSAmelchev/ignite,a1vanov/ignite,vsisko/incubator-ignite,leveyj/ignite,mcherkasov/ignite,ascherbakoff/ignite,voipp/ignite,tkpanther/ignite,samaitra/ignite,ntikhonov/ignite,shroman/ignite,dlnufox/ignite,vsuslov/incubator-ignite,thuTom/ignite,arijitt/incubator-ignite,SomeFire/ignite,shroman/ignite,ilantukh/ignite,afinka77/ignite,andrey-kuznetsov/ignite,ryanzz/ignite,alexzaitzev/ignite,shurun19851206/ignite,gargvish/ignite,avinogradovgg/ignite,BiryukovVA/ignit
e,mcherkasov/ignite,gargvish/ignite,apacheignite/ignite,murador/ignite,VladimirErshov/ignite,voipp/ignite,shroman/ignite,sylentprayer/ignite,f7753/ignite,wmz7year/ignite,wmz7year/ignite,zzcclp/ignite,vadopolski/ignite,NSAmelchev/ignite,andrey-kuznetsov/ignite,sk0x50/ignite,ptupitsyn/ignite,VladimirErshov/ignite,ilantukh/ignite,irudyak/ignite,wmz7year/ignite,f7753/ignite,voipp/ignite,andrey-kuznetsov/ignite,a1vanov/ignite,f7753/ignite,kromulan/ignite,a1vanov/ignite,SomeFire/ignite,wmz7year/ignite,murador/ignite,sk0x50/ignite,sk0x50/ignite,irudyak/ignite,voipp/ignite,vladisav/ignite,rfqu/ignite,louishust/incubator-ignite,afinka77/ignite,chandresh-pancholi/ignite,akuznetsov-gridgain/ignite,ptupitsyn/ignite,arijitt/incubator-ignite,ntikhonov/ignite,ptupitsyn/ignite,ntikhonov/ignite,afinka77/ignite,BiryukovVA/ignite,nivanov/ignite,shurun19851206/ignite,ntikhonov/ignite,StalkXT/ignite,ascherbakoff/ignite,vsisko/incubator-ignite,adeelmahmood/ignite,StalkXT/ignite,wmz7year/ignite,leveyj/ignite,agoncharuk/ignite,adeelmahmood/ignite,avinogradovgg/ignite,ptupitsyn/ignite,ashutakGG/incubator-ignite,xtern/ignite,SomeFire/ignite,kromulan/ignite,dlnufox/ignite,apacheignite/ignite,apache/ignite,vsisko/incubator-ignite,svladykin/ignite,ilantukh/ignite,StalkXT/ignite,NSAmelchev/ignite,agura/incubator-ignite,ntikhonov/ignite,sk0x50/ignite,zzcclp/ignite,agoncharuk/ignite,SomeFire/ignite,vsuslov/incubator-ignite,mcherkasov/ignite,kromulan/ignite,daradurvs/ignite,VladimirErshov/ignite,amirakhmedov/ignite,a1vanov/ignite,shroman/ignite,agoncharuk/ignite,alexzaitzev/ignite,vsisko/incubator-ignite,tkpanther/ignite,vldpyatkov/ignite,f7753/ignite,gargvish/ignite,agoncharuk/ignite,chandresh-pancholi/ignite,pperalta/ignite,wmz7year/ignite,nizhikov/ignite,vadopolski/ignite,voipp/ignite,andrey-kuznetsov/ignite,andrey-kuznetsov/ignite,StalkXT/ignite,afinka77/ignite,BiryukovVA/ignite,SharplEr/ignite,arijitt/incubator-ignite,sk0x50/ignite,daradurvs/ignite,ilantukh/ignite,vldpyatkov/ignite,ascherbakof
f/ignite,nivanov/ignite,shurun19851206/ignite,irudyak/ignite,murador/ignite,kidaa/incubator-ignite,sylentprayer/ignite,NSAmelchev/ignite,ilantukh/ignite,samaitra/ignite,leveyj/ignite,daradurvs/ignite,kidaa/incubator-ignite,apacheignite/ignite,apacheignite/ignite,irudyak/ignite,zzcclp/ignite,dream-x/ignite,gargvish/ignite,sk0x50/ignite,tkpanther/ignite,DoudTechData/ignite,ryanzz/ignite,ashutakGG/incubator-ignite,alexzaitzev/ignite,adeelmahmood/ignite,vsuslov/incubator-ignite,vldpyatkov/ignite,nizhikov/ignite,dmagda/incubator-ignite,VladimirErshov/ignite,amirakhmedov/ignite,tkpanther/ignite,dlnufox/ignite,dlnufox/ignite,endian675/ignite,sylentprayer/ignite,samaitra/ignite,samaitra/ignite,voipp/ignite,agoncharuk/ignite,xtern/ignite,shurun19851206/ignite,zzcclp/ignite,ryanzz/ignite,thuTom/ignite,dmagda/incubator-ignite,dream-x/ignite,apache/ignite,pperalta/ignite,louishust/incubator-ignite,WilliamDo/ignite,nizhikov/ignite,ashutakGG/incubator-ignite,NSAmelchev/ignite,SomeFire/ignite,kidaa/incubator-ignite,avinogradovgg/ignite,agoncharuk/ignite,wmz7year/ignite,amirakhmedov/ignite,agura/incubator-ignite,rfqu/ignite,andrey-kuznetsov/ignite,dmagda/incubator-ignite,samaitra/ignite,mcherkasov/ignite,xtern/ignite,xtern/ignite,mcherkasov/ignite,apache/ignite,psadusumilli/ignite,irudyak/ignite,afinka77/ignite,rfqu/ignite,WilliamDo/ignite,avinogradovgg/ignite,psadusumilli/ignite,xtern/ignite,apache/ignite,endian675/ignite,SharplEr/ignite,StalkXT/ignite,vadopolski/ignite,dmagda/incubator-ignite,chandresh-pancholi/ignite,shroman/ignite,SharplEr/ignite,psadusumilli/ignite,psadusumilli/ignite,ashutakGG/incubator-ignite,nivanov/ignite,apache/ignite,dmagda/incubator-ignite,andrey-kuznetsov/ignite,f7753/ignite,xtern/ignite,vadopolski/ignite,avinogradovgg/ignite,daradurvs/ignite,SharplEr/ignite,avinogradovgg/ignite,SomeFire/ignite,DoudTechData/ignite,f7753/ignite,a1vanov/ignite,amirakhmedov/ignite,vldpyatkov/ignite,apache/ignite,vsisko/incubator-ignite,thuTom/ignite,louishust/incubator-ig
nite,sylentprayer/ignite,a1vanov/ignite,ilantukh/ignite,adeelmahmood/ignite,agura/incubator-ignite,kromulan/ignite,xtern/ignite,f7753/ignite,akuznetsov-gridgain/ignite,BiryukovVA/ignite,f7753/ignite,SomeFire/ignite,thuTom/ignite,akuznetsov-gridgain/ignite,wmz7year/ignite,ptupitsyn/ignite,WilliamDo/ignite,murador/ignite,a1vanov/ignite,vsisko/incubator-ignite,ryanzz/ignite,ptupitsyn/ignite,BiryukovVA/ignite,daradurvs/ignite,psadusumilli/ignite,pperalta/ignite,shroman/ignite,tkpanther/ignite,murador/ignite,dream-x/ignite,endian675/ignite,sylentprayer/ignite,WilliamDo/ignite,WilliamDo/ignite,VladimirErshov/ignite,vadopolski/ignite,dream-x/ignite,sylentprayer/ignite,zzcclp/ignite,sylentprayer/ignite,irudyak/ignite,leveyj/ignite,murador/ignite,vldpyatkov/ignite,arijitt/incubator-ignite,ntikhonov/ignite,arijitt/incubator-ignite,leveyj/ignite,dlnufox/ignite,daradurvs/ignite,vldpyatkov/ignite,SomeFire/ignite,ryanzz/ignite,vladisav/ignite,ascherbakoff/ignite,WilliamDo/ignite,endian675/ignite,ascherbakoff/ignite,agoncharuk/ignite,xtern/ignite,DoudTechData/ignite,kromulan/ignite,leveyj/ignite,afinka77/ignite,DoudTechData/ignite,xtern/ignite,dream-x/ignite,StalkXT/ignite,voipp/ignite,nizhikov/ignite,thuTom/ignite,rfqu/ignite,kidaa/incubator-ignite,amirakhmedov/ignite,nizhikov/ignite,psadusumilli/ignite,NSAmelchev/ignite,nivanov/ignite,ptupitsyn/ignite,sk0x50/ignite,gargvish/ignite,SomeFire/ignite
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.managers.discovery; import org.apache.ignite.*; import org.apache.ignite.cache.*; import org.apache.ignite.cluster.*; import org.apache.ignite.events.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.events.*; import org.apache.ignite.internal.managers.*; import org.apache.ignite.internal.managers.communication.*; import org.apache.ignite.internal.managers.eventstorage.*; import org.apache.ignite.internal.processors.affinity.*; import org.apache.ignite.internal.processors.cache.*; import org.apache.ignite.internal.processors.jobmetrics.*; import org.apache.ignite.internal.processors.security.*; import org.apache.ignite.internal.util.*; import org.apache.ignite.internal.util.future.*; import org.apache.ignite.internal.util.lang.*; import org.apache.ignite.internal.util.tostring.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.internal.util.typedef.internal.*; import org.apache.ignite.internal.util.worker.*; import org.apache.ignite.lang.*; import org.apache.ignite.plugin.security.*; import org.apache.ignite.plugin.segmentation.*; import org.apache.ignite.spi.*; import org.apache.ignite.spi.discovery.*; import 
org.apache.ignite.thread.*; import org.jetbrains.annotations.*; import org.jsr166.*; import java.io.*; import java.lang.management.*; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; import java.util.zip.*; import static java.util.concurrent.TimeUnit.*; import static org.apache.ignite.events.EventType.*; import static org.apache.ignite.internal.IgniteNodeAttributes.*; import static org.apache.ignite.internal.IgniteVersionUtils.*; import static org.apache.ignite.plugin.segmentation.SegmentationPolicy.*; /** * Discovery SPI manager. */ public class GridDiscoveryManager extends GridManagerAdapter<DiscoverySpi> { /** Fake key for {@code null}-named caches. Used inside {@link DiscoCache}. */ private static final String NULL_CACHE_NAME = UUID.randomUUID().toString(); /** Metrics update frequency. */ private static final long METRICS_UPDATE_FREQ = 3000; /** */ private static final MemoryMXBean mem = ManagementFactory.getMemoryMXBean(); /** */ private static final OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean(); /** */ private static final RuntimeMXBean rt = ManagementFactory.getRuntimeMXBean(); /** */ private static final ThreadMXBean threads = ManagementFactory.getThreadMXBean(); /** */ private static final Collection<GarbageCollectorMXBean> gc = ManagementFactory.getGarbageCollectorMXBeans(); /** */ private static final String PREFIX = "Topology snapshot"; /** Discovery cached history size. */ protected static final int DISCOVERY_HISTORY_SIZE = 100; /** Predicate filtering out daemon nodes. */ private static final IgnitePredicate<ClusterNode> daemonFilter = new P1<ClusterNode>() { @Override public boolean apply(ClusterNode n) { return !n.isDaemon(); } }; /** Disco history entries comparator. 
*/ private static final Comparator<Map.Entry<AffinityTopologyVersion, DiscoCache>> histCmp = new Comparator<Map.Entry<AffinityTopologyVersion, DiscoCache>>() { @Override public int compare(Map.Entry<AffinityTopologyVersion, DiscoCache> o1, Map.Entry<AffinityTopologyVersion, DiscoCache> o2) { return o1.getKey().compareTo(o2.getKey()); } }; /** Discovery event worker. */ private final DiscoveryWorker discoWrk = new DiscoveryWorker(); /** Network segment check worker. */ private SegmentCheckWorker segChkWrk; /** Network segment check thread. */ private IgniteThread segChkThread; /** Last logged topology. */ private final AtomicLong lastLoggedTop = new AtomicLong(); /** Local node. */ private ClusterNode locNode; /** Local node daemon flag. */ private boolean isLocDaemon; /** {@code True} if resolvers were configured and network segment check is enabled. */ private boolean hasRslvrs; /** Last segment check result. */ private final AtomicBoolean lastSegChkRes = new AtomicBoolean(true); /** Topology cache history. */ private final Map<AffinityTopologyVersion, DiscoCache> discoCacheHist = new GridBoundedConcurrentLinkedHashMap<>(DISCOVERY_HISTORY_SIZE, DISCOVERY_HISTORY_SIZE, 0.7f, 1); /** Topology snapshots history. */ private volatile Map<Long, Collection<ClusterNode>> topHist = new HashMap<>(); /** Topology version. */ private final AtomicReference<Snapshot> topSnap = new AtomicReference<>(new Snapshot(AffinityTopologyVersion.ZERO, null)); /** Minor topology version. */ private int minorTopVer; /** Order supported flag. */ private boolean discoOrdered; /** Topology snapshots history supported flag. */ private boolean histSupported; /** Configured network segment check frequency. */ private long segChkFreq; /** Local node join to topology event. */ private GridFutureAdapter<DiscoveryEvent> locJoinEvt = new GridFutureAdapter<>(); /** GC CPU load. */ private volatile double gcCpuLoad; /** CPU load. */ private volatile double cpuLoad; /** Metrics. 
*/ private final GridLocalMetrics metrics = createMetrics(); /** Metrics update worker. */ private final MetricsUpdater metricsUpdater = new MetricsUpdater(); /** Custom event listener. */ private GridPlainInClosure<Serializable> customEvtLsnr; /** Map of dynamic cache filters. */ private Map<String, CachePredicate> registeredCaches = new HashMap<>(); /** @param ctx Context. */ public GridDiscoveryManager(GridKernalContext ctx) { super(ctx, ctx.config().getDiscoverySpi()); } /** * @return Memory usage of non-heap memory. */ private MemoryUsage nonHeapMemoryUsage() { // Workaround of exception in WebSphere. // We received the following exception: // java.lang.IllegalArgumentException: used value cannot be larger than the committed value // at java.lang.management.MemoryUsage.<init>(MemoryUsage.java:105) // at com.ibm.lang.management.MemoryMXBeanImpl.getNonHeapMemoryUsageImpl(Native Method) // at com.ibm.lang.management.MemoryMXBeanImpl.getNonHeapMemoryUsage(MemoryMXBeanImpl.java:143) // at org.apache.ignite.spi.metrics.jdk.GridJdkLocalMetricsSpi.getMetrics(GridJdkLocalMetricsSpi.java:242) // // We so had to workaround this with exception handling, because we can not control classes from WebSphere. try { return mem.getNonHeapMemoryUsage(); } catch (IllegalArgumentException ignored) { return new MemoryUsage(0, 0, 0, 0); } } /** {@inheritDoc} */ @Override public void onBeforeSpiStart() { DiscoverySpi spi = getSpi(); spi.setNodeAttributes(ctx.nodeAttributes(), VER); } /** * Adds dynamic cache filter. * * @param cacheName Cache name. * @param filter Cache filter. * @param loc {@code True} if cache is local. */ public void setCacheFilter( String cacheName, IgnitePredicate<ClusterNode> filter, boolean nearEnabled, boolean loc ) { if (!registeredCaches.containsKey(cacheName)) registeredCaches.put(cacheName, new CachePredicate(filter, nearEnabled, loc)); } /** * Removes dynamic cache filter. * * @param cacheName Cache name. 
*/ public void removeCacheFilter(String cacheName) { registeredCaches.remove(cacheName); } /** * Adds near node ID to cache filter. * * @param cacheName Cache name. * @param clientNodeId Near node ID. */ public void addClientNode(String cacheName, UUID clientNodeId, boolean nearEnabled) { CachePredicate predicate = registeredCaches.get(cacheName); if (predicate != null) predicate.addClientNode(clientNodeId, nearEnabled); } /** * @return Client nodes map. */ public Map<String, Map<UUID, Boolean>> clientNodesMap() { Map<String, Map<UUID, Boolean>> res = null; for (Map.Entry<String, CachePredicate> entry : registeredCaches.entrySet()) { CachePredicate pred = entry.getValue(); if (!F.isEmpty(pred.clientNodes)) { if (res == null) res = U.newHashMap(registeredCaches.size()); res.put(entry.getKey(), new HashMap<>(pred.clientNodes)); } } return res; } /** * @param leftNodeId Left node ID. */ private void updateClientNodes(UUID leftNodeId) { for (Map.Entry<String, CachePredicate> entry : registeredCaches.entrySet()) { CachePredicate pred = entry.getValue(); pred.onNodeLeft(leftNodeId); } } /** * @param evtType Event type. * @return Next affinity topology version. */ private AffinityTopologyVersion nextTopologyVersion(int evtType, long topVer) { if (evtType == DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT) minorTopVer++; else if (evtType != EVT_NODE_METRICS_UPDATED) minorTopVer = 0; return new AffinityTopologyVersion(topVer, minorTopVer); } /** {@inheritDoc} */ @Override public void start() throws IgniteCheckedException { long totSysMemory = -1; try { totSysMemory = U.<Long>property(os, "totalPhysicalMemorySize"); } catch (RuntimeException ignored) { // No-op. 
} ctx.addNodeAttribute(IgniteNodeAttributes.ATTR_PHY_RAM, totSysMemory); DiscoverySpi spi = getSpi(); discoOrdered = discoOrdered(); histSupported = historySupported(); isLocDaemon = ctx.isDaemon(); hasRslvrs = !F.isEmpty(ctx.config().getSegmentationResolvers()); segChkFreq = ctx.config().getSegmentCheckFrequency(); if (hasRslvrs) { if (segChkFreq < 0) throw new IgniteCheckedException("Segment check frequency cannot be negative: " + segChkFreq); if (segChkFreq > 0 && segChkFreq < 2000) U.warn(log, "Configuration parameter 'segmentCheckFrequency' is too low " + "(at least 2000 ms recommended): " + segChkFreq); checkSegmentOnStart(); } new IgniteThread(metricsUpdater).start(); spi.setMetricsProvider(createMetricsProvider()); if (ctx.security().enabled()) { spi.setAuthenticator(new DiscoverySpiNodeAuthenticator() { @Override public SecurityContext authenticateNode(ClusterNode node, SecurityCredentials cred) { try { return ctx.security().authenticateNode(node, cred); } catch (IgniteCheckedException e) { throw U.convertException(e); } } @Override public boolean isGlobalNodeAuthentication() { return ctx.security().isGlobalNodeAuthentication(); } }); } spi.setListener(new DiscoverySpiListener() { @Override public void onDiscovery( int type, long topVer, ClusterNode node, Collection<ClusterNode> topSnapshot, Map<Long, Collection<ClusterNode>> snapshots, @Nullable Serializable data ) { final ClusterNode locNode = localNode(); if (snapshots != null) topHist = snapshots; AffinityTopologyVersion nextTopVer = nextTopologyVersion(type, topVer); if (type == EVT_NODE_FAILED || type == EVT_NODE_LEFT) { for (DiscoCache c : discoCacheHist.values()) c.updateAlives(node); updateClientNodes(node.id()); } if (type == DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT) { try { if (customEvtLsnr != null) customEvtLsnr.apply(data); } catch (Exception e) { U.error(log, "Failed to notify direct custom event listener: " + data, e); } } // Put topology snapshot into discovery history. 
// There is no race possible between history maintenance and concurrent discovery // event notifications, since SPI notifies manager about all events from this listener. if (type != EVT_NODE_METRICS_UPDATED) { DiscoCache cache = new DiscoCache(locNode, F.view(topSnapshot, F.remoteNodes(locNode.id()))); discoCacheHist.put(nextTopVer, cache); boolean set = updateTopologyVersionIfGreater(nextTopVer, cache); assert set || topVer == 0 : "Topology version has not been updated [this.topVer=" + topSnap + ", topVer=" + topVer + ", node=" + node + ", evt=" + U.gridEventName(type) + ']'; } // If this is a local join event, just save it and do not notify listeners. if (type == EVT_NODE_JOINED && node.id().equals(locNode.id())) { DiscoveryEvent discoEvt = new DiscoveryEvent(); discoEvt.node(ctx.discovery().localNode()); discoEvt.eventNode(node); discoEvt.type(EVT_NODE_JOINED); discoEvt.topologySnapshot(topVer, new ArrayList<>( F.viewReadOnly(topSnapshot, new C1<ClusterNode, ClusterNode>() { @Override public ClusterNode apply(ClusterNode e) { return e; } }, daemonFilter))); locJoinEvt.onDone(discoEvt); return; } discoWrk.addEvent(type, nextTopVer, node, topSnapshot, data); } }); spi.setDataExchange(new DiscoverySpiDataExchange() { @Override public Map<Integer, Serializable> collect(UUID nodeId) { assert nodeId != null; Map<Integer, Serializable> data = new HashMap<>(); for (GridComponent comp : ctx.components()) { Serializable compData = comp.collectDiscoveryData(nodeId); if (compData != null) { assert comp.discoveryDataType() != null; data.put(comp.discoveryDataType().ordinal(), compData); } } return data; } @Override public void onExchange(UUID joiningNodeId, UUID nodeId, Map<Integer, Serializable> data) { for (Map.Entry<Integer, Serializable> e : data.entrySet()) { GridComponent comp = null; for (GridComponent c : ctx.components()) { if (c.discoveryDataType() != null && c.discoveryDataType().ordinal() == e.getKey()) { comp = c; break; } } if (comp != null) 
comp.onDiscoveryDataReceived(joiningNodeId, nodeId, e.getValue()); else U.warn(log, "Received discovery data for unknown component: " + e.getKey()); } } }); startSpi(); // Start segment check worker only if frequency is greater than 0. if (hasRslvrs && segChkFreq > 0) { segChkWrk = new SegmentCheckWorker(); segChkThread = new IgniteThread(segChkWrk); segChkThread.start(); } locNode = spi.getLocalNode(); updateTopologyVersionIfGreater(new AffinityTopologyVersion(locNode.order()), new DiscoCache(localNode(), getSpi().getRemoteNodes())); checkAttributes(discoCache().remoteNodes()); // Start discovery worker. new IgniteThread(discoWrk).start(); if (log.isDebugEnabled()) log.debug(startInfo()); } /** * @param customEvtLsnr Custom event listener. */ public void setCustomEventListener(GridPlainInClosure<Serializable> customEvtLsnr) { this.customEvtLsnr = customEvtLsnr; } /** * @return Metrics. */ private GridLocalMetrics createMetrics() { return new GridLocalMetrics() { @Override public int getAvailableProcessors() { return os.getAvailableProcessors(); } @Override public double getCurrentCpuLoad() { return cpuLoad; } @Override public double getCurrentGcCpuLoad() { return gcCpuLoad; } @Override public long getHeapMemoryInitialized() { return mem.getHeapMemoryUsage().getInit(); } @Override public long getHeapMemoryUsed() { return mem.getHeapMemoryUsage().getUsed(); } @Override public long getHeapMemoryCommitted() { return mem.getHeapMemoryUsage().getCommitted(); } @Override public long getHeapMemoryMaximum() { return mem.getHeapMemoryUsage().getMax(); } @Override public long getNonHeapMemoryInitialized() { return nonHeapMemoryUsage().getInit(); } @Override public long getNonHeapMemoryUsed() { return nonHeapMemoryUsage().getUsed(); } @Override public long getNonHeapMemoryCommitted() { return nonHeapMemoryUsage().getCommitted(); } @Override public long getNonHeapMemoryMaximum() { return nonHeapMemoryUsage().getMax(); } @Override public long getUptime() { return 
rt.getUptime(); } @Override public long getStartTime() { return rt.getStartTime(); } @Override public int getThreadCount() { return threads.getThreadCount(); } @Override public int getPeakThreadCount() { return threads.getPeakThreadCount(); } @Override public long getTotalStartedThreadCount() { return threads.getTotalStartedThreadCount(); } @Override public int getDaemonThreadCount() { return threads.getDaemonThreadCount(); } }; } /** * @return Metrics provider. */ private DiscoveryMetricsProvider createMetricsProvider() { return new DiscoveryMetricsProvider() { /** */ private final long startTime = U.currentTimeMillis(); /** {@inheritDoc} */ @Override public ClusterMetrics metrics() { GridJobMetrics jm = ctx.jobMetric().getJobMetrics(); ClusterMetricsSnapshot nm = new ClusterMetricsSnapshot(); nm.setLastUpdateTime(U.currentTimeMillis()); // Job metrics. nm.setMaximumActiveJobs(jm.getMaximumActiveJobs()); nm.setCurrentActiveJobs(jm.getCurrentActiveJobs()); nm.setAverageActiveJobs(jm.getAverageActiveJobs()); nm.setMaximumWaitingJobs(jm.getMaximumWaitingJobs()); nm.setCurrentWaitingJobs(jm.getCurrentWaitingJobs()); nm.setAverageWaitingJobs(jm.getAverageWaitingJobs()); nm.setMaximumRejectedJobs(jm.getMaximumRejectedJobs()); nm.setCurrentRejectedJobs(jm.getCurrentRejectedJobs()); nm.setAverageRejectedJobs(jm.getAverageRejectedJobs()); nm.setMaximumCancelledJobs(jm.getMaximumCancelledJobs()); nm.setCurrentCancelledJobs(jm.getCurrentCancelledJobs()); nm.setAverageCancelledJobs(jm.getAverageCancelledJobs()); nm.setTotalRejectedJobs(jm.getTotalRejectedJobs()); nm.setTotalCancelledJobs(jm.getTotalCancelledJobs()); nm.setTotalExecutedJobs(jm.getTotalExecutedJobs()); nm.setMaximumJobWaitTime(jm.getMaximumJobWaitTime()); nm.setCurrentJobWaitTime(jm.getCurrentJobWaitTime()); nm.setAverageJobWaitTime(jm.getAverageJobWaitTime()); nm.setMaximumJobExecuteTime(jm.getMaximumJobExecuteTime()); nm.setCurrentJobExecuteTime(jm.getCurrentJobExecuteTime()); 
nm.setAverageJobExecuteTime(jm.getAverageJobExecuteTime()); nm.setCurrentIdleTime(jm.getCurrentIdleTime()); nm.setTotalIdleTime(jm.getTotalIdleTime()); nm.setAverageCpuLoad(jm.getAverageCpuLoad()); // Job metrics. nm.setTotalExecutedTasks(ctx.task().getTotalExecutedTasks()); // VM metrics. nm.setAvailableProcessors(metrics.getAvailableProcessors()); nm.setCurrentCpuLoad(metrics.getCurrentCpuLoad()); nm.setCurrentGcCpuLoad(metrics.getCurrentGcCpuLoad()); nm.setHeapMemoryInitialized(metrics.getHeapMemoryInitialized()); nm.setHeapMemoryUsed(metrics.getHeapMemoryUsed()); nm.setHeapMemoryCommitted(metrics.getHeapMemoryCommitted()); nm.setHeapMemoryMaximum(metrics.getHeapMemoryMaximum()); nm.setHeapMemoryTotal(metrics.getHeapMemoryMaximum()); nm.setNonHeapMemoryInitialized(metrics.getNonHeapMemoryInitialized()); nm.setNonHeapMemoryUsed(metrics.getNonHeapMemoryUsed()); nm.setNonHeapMemoryCommitted(metrics.getNonHeapMemoryCommitted()); nm.setNonHeapMemoryMaximum(metrics.getNonHeapMemoryMaximum()); nm.setNonHeapMemoryTotal(metrics.getNonHeapMemoryMaximum()); nm.setUpTime(metrics.getUptime()); nm.setStartTime(metrics.getStartTime()); nm.setNodeStartTime(startTime); nm.setCurrentThreadCount(metrics.getThreadCount()); nm.setMaximumThreadCount(metrics.getPeakThreadCount()); nm.setTotalStartedThreadCount(metrics.getTotalStartedThreadCount()); nm.setCurrentDaemonThreadCount(metrics.getDaemonThreadCount()); nm.setTotalNodes(1); // Data metrics. nm.setLastDataVersion(ctx.cache().lastDataVersion()); GridIoManager io = ctx.io(); // IO metrics. 
nm.setSentMessagesCount(io.getSentMessagesCount()); nm.setSentBytesCount(io.getSentBytesCount()); nm.setReceivedMessagesCount(io.getReceivedMessagesCount()); nm.setReceivedBytesCount(io.getReceivedBytesCount()); nm.setOutboundMessagesQueueSize(io.getOutboundMessagesQueueSize()); return nm; } /** {@inheritDoc} */ @Override public Map<Integer, CacheMetrics> cacheMetrics() { Collection<GridCacheAdapter<?, ?>> caches = ctx.cache().internalCaches(); if (F.isEmpty(caches)) return Collections.emptyMap(); Map<Integer, CacheMetrics> metrics = null; for (GridCacheAdapter<?, ?> cache : caches) { if (cache.configuration().isStatisticsEnabled()) { if (metrics == null) metrics = U.newHashMap(caches.size()); metrics.put(cache.context().cacheId(), cache.metrics()); } } return metrics == null ? Collections.<Integer, CacheMetrics>emptyMap() : metrics; } }; } /** * @return Local metrics. */ public GridLocalMetrics metrics() { return metrics; } /** @return {@code True} if ordering is supported. */ private boolean discoOrdered() { DiscoverySpiOrderSupport ann = U.getAnnotation(ctx.config().getDiscoverySpi().getClass(), DiscoverySpiOrderSupport.class); return ann != null && ann.value(); } /** @return {@code True} if topology snapshots history is supported. */ private boolean historySupported() { DiscoverySpiHistorySupport ann = U.getAnnotation(ctx.config().getDiscoverySpi().getClass(), DiscoverySpiHistorySupport.class); return ann != null && ann.value(); } /** * Checks segment on start waiting for correct segment if necessary. * * @throws IgniteCheckedException If check failed. */ private void checkSegmentOnStart() throws IgniteCheckedException { assert hasRslvrs; if (log.isDebugEnabled()) log.debug("Starting network segment check."); while (true) { if (ctx.segmentation().isValidSegment()) break; if (ctx.config().isWaitForSegmentOnStart()) { LT.warn(log, null, "Failed to check network segment (retrying every 2000 ms)."); // Wait and check again. 
                U.sleep(2000);
            }
            else
                throw new IgniteCheckedException("Failed to check network segment.");
        }

        if (log.isDebugEnabled())
            log.debug("Finished network segment check successfully.");
    }

    /**
     * Checks whether attributes of the local node are consistent with remote nodes.
     *
     * @param nodes List of remote nodes to check attributes on.
     * @throws IgniteCheckedException In case of error.
     */
    private void checkAttributes(Iterable<ClusterNode> nodes) throws IgniteCheckedException {
        ClusterNode locNode = getSpi().getLocalNode();

        assert locNode != null;

        // Fetch local node attributes once.
        String locPreferIpV4 = locNode.attribute("java.net.preferIPv4Stack");

        Object locMode = locNode.attribute(ATTR_DEPLOYMENT_MODE);

        int locJvmMajVer = nodeJavaMajorVer(locNode);

        // NOTE(review): attribute() yields a boxed Boolean here; if the attribute is ever
        // absent, this unboxing throws NullPointerException — confirm the SPI always sets
        // ATTR_PEER_CLASSLOADING before relying on it.
        boolean locP2pEnabled = locNode.attribute(ATTR_PEER_CLASSLOADING);

        boolean warned = false;

        for (ClusterNode n : nodes) {
            int rmtJvmMajVer = nodeJavaMajorVer(n);

            // Mixed Java major versions across the cluster are rejected outright.
            if (locJvmMajVer != rmtJvmMajVer)
                throw new IgniteCheckedException("Local node's java major version = " + locJvmMajVer +
                    " is different from remote node's one = " + rmtJvmMajVer);

            String rmtPreferIpV4 = n.attribute("java.net.preferIPv4Stack");

            if (!F.eq(rmtPreferIpV4, locPreferIpV4)) {
                // Warn only once no matter how many remote nodes mismatch.
                if (!warned)
                    U.warn(log, "Local node's value of 'java.net.preferIPv4Stack' " +
                        "system property differs from remote node's " +
                        "(all nodes in topology should have identical value) " +
                        "[locPreferIpV4=" + locPreferIpV4 + ", rmtPreferIpV4=" + rmtPreferIpV4 +
                        ", locId8=" + U.id8(locNode.id()) + ", rmtId8=" + U.id8(n.id()) +
                        ", rmtAddrs=" + U.addressesAsString(n) + ']',
                        "Local and remote 'java.net.preferIPv4Stack' system properties do not match.");

                warned = true;
            }

            // Daemon nodes are allowed to have any deployment they need.
            // Skip data center ID check for daemon nodes.
if (!isLocDaemon && !n.isDaemon()) { Object rmtMode = n.attribute(ATTR_DEPLOYMENT_MODE); if (!locMode.equals(rmtMode)) throw new IgniteCheckedException("Remote node has deployment mode different from local " + "[locId8=" + U.id8(locNode.id()) + ", locMode=" + locMode + ", rmtId8=" + U.id8(n.id()) + ", rmtMode=" + rmtMode + ", rmtAddrs=" + U.addressesAsString(n) + ']'); boolean rmtP2pEnabled = n.attribute(ATTR_PEER_CLASSLOADING); if (locP2pEnabled != rmtP2pEnabled) throw new IgniteCheckedException("Remote node has peer class loading enabled flag different from local " + "[locId8=" + U.id8(locNode.id()) + ", locPeerClassLoading=" + locP2pEnabled + ", rmtId8=" + U.id8(n.id()) + ", rmtPeerClassLoading=" + rmtP2pEnabled + ", rmtAddrs=" + U.addressesAsString(n) + ']'); } } if (log.isDebugEnabled()) log.debug("Finished node attributes consistency check."); } private int nodeJavaMajorVer(ClusterNode node) throws IgniteCheckedException { try { return Integer.parseInt(node.<String>attribute("java.version").split(".")[1]); } catch (Exception e) { throw new IgniteCheckedException("Failed to get java major version with reason: " + e.getMessage()); } } /** * @param nodes Nodes. * @return Total CPUs. */ private static int cpus(Collection<ClusterNode> nodes) { Collection<String> macSet = new HashSet<>(nodes.size(), 1.0f); int cpus = 0; for (ClusterNode n : nodes) { String macs = n.attribute(ATTR_MACS); if (macSet.add(macs)) cpus += n.metrics().getTotalCpus(); } return cpus; } /** * Prints the latest topology info into log taking into account logging/verbosity settings. */ public void ackTopology() { ackTopology(topSnap.get().topVer.topologyVersion(), false); } /** * Logs grid size for license compliance. * * @param topVer Topology version. * @param throttle Suppress printing if this topology was already printed. 
*/ private void ackTopology(long topVer, boolean throttle) { assert !isLocDaemon; DiscoCache discoCache = discoCache(); Collection<ClusterNode> rmtNodes = discoCache.remoteNodes(); ClusterNode locNode = discoCache.localNode(); Collection<ClusterNode> allNodes = discoCache.allNodes(); long hash = topologyHash(allNodes); // Prevent ack-ing topology for the same topology. // Can happen only during node startup. if (throttle && lastLoggedTop.getAndSet(hash) == hash) return; int totalCpus = cpus(allNodes); double heap = U.heapSize(allNodes, 2); if (log.isQuiet()) U.quiet(false, topologySnapshotMessage(rmtNodes.size(), totalCpus, heap)); if (log.isDebugEnabled()) { String dbg = ""; dbg += U.nl() + U.nl() + ">>> +----------------+" + U.nl() + ">>> " + PREFIX + "." + U.nl() + ">>> +----------------+" + U.nl() + ">>> Grid name: " + (ctx.gridName() == null ? "default" : ctx.gridName()) + U.nl() + ">>> Number of nodes: " + (rmtNodes.size() + 1) + U.nl() + (discoOrdered ? ">>> Topology version: " + topVer + U.nl() : "") + ">>> Topology hash: 0x" + Long.toHexString(hash).toUpperCase() + U.nl(); dbg += ">>> Local: " + locNode.id().toString().toUpperCase() + ", " + U.addressesAsString(locNode) + ", " + locNode.order() + ", " + locNode.attribute("os.name") + ' ' + locNode.attribute("os.arch") + ' ' + locNode.attribute("os.version") + ", " + System.getProperty("user.name") + ", " + locNode.attribute("java.runtime.name") + ' ' + locNode.attribute("java.runtime.version") + U.nl(); for (ClusterNode node : rmtNodes) dbg += ">>> Remote: " + node.id().toString().toUpperCase() + ", " + U.addressesAsString(node) + ", " + node.order() + ", " + node.attribute("os.name") + ' ' + node.attribute("os.arch") + ' ' + node.attribute("os.version") + ", " + node.attribute(ATTR_USER_NAME) + ", " + node.attribute("java.runtime.name") + ' ' + node.attribute("java.runtime.version") + U.nl(); dbg += ">>> Total number of CPUs: " + totalCpus + U.nl(); dbg += ">>> Total heap size: " + heap + "GB" + U.nl(); 
            log.debug(dbg);
        }
        else if (log.isInfoEnabled())
            log.info(topologySnapshotMessage(rmtNodes.size(), totalCpus, heap));
    }

    /**
     * Builds the one-line topology snapshot message used by quiet/info-level logging.
     *
     * @param rmtNodesNum Remote nodes number.
     * @param totalCpus Total cpu number.
     * @param heap Heap size.
     * @return Topology snapshot message.
     */
    private String topologySnapshotMessage(int rmtNodesNum, int totalCpus, double heap) {
        return PREFIX + " [" +
            (discoOrdered ? "ver=" + topSnap.get().topVer.topologyVersion() + ", " : "") +
            "nodes=" + (rmtNodesNum + 1) +
            ", CPUs=" + totalCpus +
            ", heap=" + heap + "GB" + ']';
    }

    /** {@inheritDoc} */
    @Override public void onKernalStop0(boolean cancel) {
        // Stop segment check worker.
        if (segChkWrk != null) {
            segChkWrk.cancel();

            U.join(segChkThread, log);
        }

        // Unblock anyone still waiting for the local join event.
        if (!locJoinEvt.isDone())
            locJoinEvt.onDone(
                new IgniteCheckedException("Failed to wait for local node joined event (grid is stopping)."));
    }

    /** {@inheritDoc} */
    @Override public void stop(boolean cancel) throws IgniteCheckedException {
        // Stop receiving notifications.
        getSpi().setListener(null);

        // Stop discovery worker and metrics updater.
        U.cancel(discoWrk);
        U.cancel(metricsUpdater);

        U.join(discoWrk, log);
        U.join(metricsUpdater, log);

        // Stop SPI itself.
        stopSpi();

        if (log.isDebugEnabled())
            log.debug(stopInfo());
    }

    /**
     * Checks that every given node ID belongs to an alive node.
     *
     * <p>NOTE(review): the previous wording ("at least one ID belongs to an alive node")
     * contradicted the implementation — the loop below returns {@code false} on the first
     * dead node, so ALL IDs must be alive; a {@code null} or empty collection also yields
     * {@code false}.
     *
     * @param nodeIds Node IDs to check.
     * @return {@code True} if all IDs belong to alive nodes.
     */
    public boolean aliveAll(@Nullable Collection<UUID> nodeIds) {
        if (nodeIds == null || nodeIds.isEmpty())
            return false;

        for (UUID id : nodeIds)
            if (!alive(id))
                return false;

        return true;
    }

    /**
     * @param nodeId Node ID.
     * @return {@code True} if node for given ID is alive.
     */
    public boolean alive(UUID nodeId) {
        assert nodeId != null;

        return getSpi().getNode(nodeId) != null; // Go directly to SPI without checking disco cache.
    }

    /**
     * @param node Node.
     * @return {@code True} if node is alive.
     */
    public boolean alive(ClusterNode node) {
        assert node != null;

        return alive(node.id());
    }

    /**
     * @param nodeId ID of the node.
* @return {@code True} if ping succeeded. */ public boolean pingNode(UUID nodeId) { assert nodeId != null; return getSpi().pingNode(nodeId); } /** * @param nodeId ID of the node. * @return Node for ID. */ @Nullable public ClusterNode node(UUID nodeId) { assert nodeId != null; return discoCache().node(nodeId); } /** * Gets collection of node for given node IDs and predicates. * * @param ids Ids to include. * @param p Filter for IDs. * @return Collection with all alive nodes for given IDs. */ public Collection<ClusterNode> nodes(@Nullable Collection<UUID> ids, IgnitePredicate<UUID>... p) { return F.isEmpty(ids) ? Collections.<ClusterNode>emptyList() : F.view( F.viewReadOnly(ids, U.id2Node(ctx), p), F.notNull()); } /** * Gets topology hash for given set of nodes. * * @param nodes Subset of grid nodes for hashing. * @return Hash for given topology. */ public long topologyHash(Iterable<? extends ClusterNode> nodes) { assert nodes != null; Iterator<? extends ClusterNode> iter = nodes.iterator(); if (!iter.hasNext()) return 0; // Special case. List<String> uids = new ArrayList<>(); for (ClusterNode node : nodes) uids.add(node.id().toString()); Collections.sort(uids); CRC32 hash = new CRC32(); for (String uuid : uids) hash.update(uuid.getBytes()); return hash.getValue(); } /** * Gets future that will be completed when current topology version becomes greater or equal to argument passed. * * @param awaitVer Topology version to await. * @return Future. */ public IgniteInternalFuture<Long> topologyFuture(final long awaitVer) { long topVer = topologyVersion(); if (topVer >= awaitVer) return new GridFinishedFuture<>(topVer); DiscoTopologyFuture fut = new DiscoTopologyFuture(ctx, awaitVer); fut.init(); return fut; } /** * Gets discovery collection cache from SPI safely guarding against "floating" collections. * * @return Discovery collection cache. 
*/ public DiscoCache discoCache() { Snapshot cur; while ((cur = topSnap.get()) == null) { // Wrap the SPI collection to avoid possible floating collection. if (topSnap.compareAndSet(null, cur = new Snapshot( AffinityTopologyVersion.ZERO, new DiscoCache(localNode(), getSpi().getRemoteNodes())))) { return cur.discoCache; } } return cur.discoCache; } /** * Gets discovery collection cache from SPI safely guarding against "floating" collections. * * @return Discovery collection cache. */ public DiscoCache discoCache(AffinityTopologyVersion topVer) { return discoCacheHist.get(topVer); } /** @return All non-daemon remote nodes in topology. */ public Collection<ClusterNode> remoteNodes() { return discoCache().remoteNodes(); } /** @return All non-daemon nodes in topology. */ public Collection<ClusterNode> allNodes() { return discoCache().allNodes(); } /** * Gets topology grouped by node versions. * * @return Version to collection of nodes map. */ public NavigableMap<IgniteProductVersion, Collection<ClusterNode>> topologyVersionMap() { return discoCache().versionsMap(); } /** @return Full topology size. */ public int size() { return discoCache().allNodes().size(); } /** * Gets all nodes for given topology version. * * @param topVer Topology version. * @return Collection of cache nodes. */ public Collection<ClusterNode> nodes(long topVer) { return resolveDiscoCache(null, new AffinityTopologyVersion(topVer)).allNodes(); } /** * Gets cache nodes for cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of cache nodes. */ public Collection<ClusterNode> cacheNodes(@Nullable String cacheName, AffinityTopologyVersion topVer) { return resolveDiscoCache(cacheName, topVer).cacheNodes(cacheName, topVer.topologyVersion()); } /** * Gets all nodes with at least one cache configured. * * @param topVer Topology version. * @return Collection of cache nodes. 
*/ public Collection<ClusterNode> cacheNodes(AffinityTopologyVersion topVer) { return resolveDiscoCache(null, topVer).allNodesWithCaches(topVer.topologyVersion()); } /** * Gets cache remote nodes for cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of cache nodes. */ public Collection<ClusterNode> remoteCacheNodes(@Nullable String cacheName, AffinityTopologyVersion topVer) { return resolveDiscoCache(cacheName, topVer).remoteCacheNodes(cacheName, topVer.topologyVersion()); } /** * Gets cache remote nodes for cache with given name. * * @param topVer Topology version. * @return Collection of cache nodes. */ public Collection<ClusterNode> remoteCacheNodes(AffinityTopologyVersion topVer) { return resolveDiscoCache(null, topVer).remoteCacheNodes(topVer.topologyVersion()); } /** * Gets cache nodes for cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of cache nodes. */ public Collection<ClusterNode> aliveCacheNodes(@Nullable String cacheName, AffinityTopologyVersion topVer) { return resolveDiscoCache(cacheName, topVer).aliveCacheNodes(cacheName, topVer.topologyVersion()); } /** * Gets cache remote nodes for cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of cache nodes. */ public Collection<ClusterNode> aliveRemoteCacheNodes(@Nullable String cacheName, AffinityTopologyVersion topVer) { return resolveDiscoCache(cacheName, topVer).aliveRemoteCacheNodes(cacheName, topVer.topologyVersion()); } /** * Gets alive remote nodes with at least one cache configured. * * @param topVer Topology version (maximum allowed node order). * @return Collection of alive cache nodes. 
*/ public Collection<ClusterNode> aliveRemoteNodesWithCaches(AffinityTopologyVersion topVer) { return resolveDiscoCache(null, topVer).aliveRemoteNodesWithCaches(topVer.topologyVersion()); } /** * Gets alive nodes with at least one cache configured. * * @param topVer Topology version (maximum allowed node order). * @return Collection of alive cache nodes. */ public Collection<ClusterNode> aliveNodesWithCaches(AffinityTopologyVersion topVer) { return resolveDiscoCache(null, topVer).aliveNodesWithCaches(topVer.topologyVersion()); } /** * Gets cache nodes for cache with given name that participate in affinity calculation. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of cache affinity nodes. */ public Collection<ClusterNode> cacheAffinityNodes(@Nullable String cacheName, AffinityTopologyVersion topVer) { return resolveDiscoCache(cacheName, topVer).cacheAffinityNodes(cacheName, topVer.topologyVersion()); } /** * Checks if node is a data node for the given cache. * * @param node Node to check. * @param cacheName Cache name. * @return {@code True} if node is a cache data node. */ public boolean cacheAffinityNode(ClusterNode node, String cacheName) { CachePredicate predicate = registeredCaches.get(cacheName); return predicate != null && predicate.dataNode(node); } /** * @param node Node to check. * @param cacheName Cache name. * @return {@code True} if node has near cache enabled. */ public boolean cacheNearNode(ClusterNode node, String cacheName) { CachePredicate predicate = registeredCaches.get(cacheName); return predicate != null && predicate.nearNode(node); } /** * @param node Node to check. * @param cacheName Cache name. * @return {@code True} if node has client cache (without near cache). */ public boolean cacheClientNode(ClusterNode node, String cacheName) { CachePredicate predicate = registeredCaches.get(cacheName); return predicate != null && predicate.clientNode(node); } /** * @param node Node to check. 
* @param cacheName Cache name. * @return If cache with the given name is accessible on the given node. */ public boolean cacheNode(ClusterNode node, String cacheName) { CachePredicate predicate = registeredCaches.get(cacheName); return predicate != null && predicate.cacheNode(node); } /** * Checks if cache with given name has at least one node with near cache enabled. * * @param cacheName Cache name. * @param topVer Topology version. * @return {@code True} if cache with given name has at least one node with near cache enabled. */ public boolean hasNearCache(@Nullable String cacheName, AffinityTopologyVersion topVer) { return resolveDiscoCache(cacheName, topVer).hasNearCache(cacheName); } /** * Gets discovery cache for given topology version. * * @param cacheName Cache name (participates in exception message). * @param topVer Topology version. * @return Discovery cache. */ private DiscoCache resolveDiscoCache(@Nullable String cacheName, AffinityTopologyVersion topVer) { Snapshot snap = topSnap.get(); DiscoCache cache = AffinityTopologyVersion.NONE.equals(topVer) || topVer.equals(snap.topVer) ? snap.discoCache : discoCacheHist.get(topVer); if (cache == null) { // Find the eldest acceptable discovery cache. Map.Entry<AffinityTopologyVersion, DiscoCache> eldest = Collections.min(discoCacheHist.entrySet(), histCmp); if (topVer.compareTo(eldest.getKey()) < 0) cache = eldest.getValue(); } if (cache == null) { throw new IgniteException("Failed to resolve nodes topology [cacheName=" + cacheName + ", topVer=" + topVer + ", history=" + discoCacheHist.keySet() + ", locNode=" + ctx.discovery().localNode() + ']'); } return cache; } /** * Gets topology by specified version from history storage. * * @param topVer Topology version. * @return Topology nodes or {@code null} if there are no nodes for passed in version. 
*/ @Nullable public Collection<ClusterNode> topology(long topVer) { if (!histSupported) throw new UnsupportedOperationException("Current discovery SPI does not support " + "topology snapshots history (consider using TCP discovery SPI)."); Map<Long, Collection<ClusterNode>> snapshots = topHist; return snapshots.get(topVer); } /** @return All daemon nodes in topology. */ public Collection<ClusterNode> daemonNodes() { return discoCache().daemonNodes(); } /** @return Local node. */ public ClusterNode localNode() { return locNode == null ? getSpi().getLocalNode() : locNode; } /** @return Topology version. */ public long topologyVersion() { return topSnap.get().topVer.topologyVersion(); } /** * @return Topology version. */ public AffinityTopologyVersion topologyVersionEx() { return topSnap.get().topVer; } /** @return Event that represents a local node joined to topology. */ public DiscoveryEvent localJoinEvent() { try { return locJoinEvt.get(); } catch (IgniteCheckedException e) { throw new IgniteException(e); } } /** * @param evt Event. */ public void sendCustomEvent(Serializable evt) { getSpi().sendCustomEvent(evt); } /** * Gets first grid node start time, see {@link DiscoverySpi#getGridStartTime()}. * * @return Start time of the first grid node. */ public long gridStartTime() { return getSpi().getGridStartTime(); } /** * @param nodeId Node ID. * @return Whether node is failed. */ public boolean tryFailNode(UUID nodeId) { if (!getSpi().pingNode(nodeId)) { getSpi().failNode(nodeId); return true; } return false; } /** * Updates topology version if current version is smaller than updated. * * @param updated Updated topology version. * @return {@code True} if topology was updated. 
*/ private boolean updateTopologyVersionIfGreater(AffinityTopologyVersion updated, DiscoCache discoCache) { while (true) { Snapshot cur = topSnap.get(); if (updated.compareTo(cur.topVer) >= 0) { if (topSnap.compareAndSet(cur, new Snapshot(updated, discoCache))) return true; } else return false; } } /** Stops local node. */ private void stopNode() { new Thread( new Runnable() { @Override public void run() { ctx.markSegmented(); G.stop(ctx.gridName(), true); } } ).start(); } /** Restarts JVM. */ private void restartJvm() { new Thread( new Runnable() { @Override public void run() { ctx.markSegmented(); G.restart(true); } } ).start(); } /** Worker for network segment checks. */ private class SegmentCheckWorker extends GridWorker { /** */ private final BlockingQueue<Object> queue = new LinkedBlockingQueue<>(); /** * */ private SegmentCheckWorker() { super(ctx.gridName(), "disco-net-seg-chk-worker", GridDiscoveryManager.this.log); assert hasRslvrs; assert segChkFreq > 0; } /** * */ public void scheduleSegmentCheck() { queue.add(new Object()); } /** {@inheritDoc} */ @SuppressWarnings("StatementWithEmptyBody") @Override protected void body() throws InterruptedException { long lastChk = 0; while (!isCancelled()) { Object req = queue.poll(2000, MILLISECONDS); long now = U.currentTimeMillis(); // Check frequency if segment check has not been requested. if (req == null && (segChkFreq == 0 || lastChk + segChkFreq >= now)) { if (log.isDebugEnabled()) log.debug("Skipping segment check as it has not been requested and it is not time to check."); continue; } // We should always check segment if it has been explicitly // requested (on any node failure or leave). assert req != null || lastChk + segChkFreq < now; // Drain queue. while (queue.poll() != null) { // No-op. 
                }

                if (lastSegChkRes.get()) {
                    boolean segValid = ctx.segmentation().isValidSegment();

                    lastChk = now;

                    if (!segValid) {
                        // Local node found itself in an invalid segment: enqueue EVT_NODE_SEGMENTED for the
                        // local node so the discovery worker can apply the configured segmentation policy.
                        discoWrk.addEvent(EVT_NODE_SEGMENTED, AffinityTopologyVersion.NONE, getSpi().getLocalNode(),
                            Collections.<ClusterNode>emptyList(), null);

                        lastSegChkRes.set(false);
                    }

                    if (log.isDebugEnabled())
                        log.debug("Segment has been checked [requested=" + (req != null) + ", valid=" + segValid + ']');
                }
            }
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(SegmentCheckWorker.class, this);
        }
    }

    /** Worker for discovery events. Drains the event queue filled by the discovery SPI listener. */
    private class DiscoveryWorker extends GridWorker {
        /** Event queue (tuple: event type, topology version, node, topology snapshot, custom data). */
        private final BlockingQueue<GridTuple5<Integer, AffinityTopologyVersion, ClusterNode, Collection<ClusterNode>,
            Serializable>> evts = new LinkedBlockingQueue<>();

        /** Node segmented event fired flag. */
        private boolean nodeSegFired;

        /**
         * Creates discovery event worker.
         */
        private DiscoveryWorker() {
            super(ctx.gridName(), "disco-event-worker", GridDiscoveryManager.this.log);
        }

        /**
         * Method is called when any discovery event occurs.
         *
         * @param type Discovery event type. See {@link DiscoveryEvent} for more details.
         * @param topVer Topology version.
         * @param node Remote node this event is connected with.
         * @param topSnapshot Topology snapshot.
         */
        @SuppressWarnings("RedundantTypeArguments")
        private void recordEvent(int type, long topVer, ClusterNode node, Collection<ClusterNode> topSnapshot) {
            assert node != null;

            if (ctx.event().isRecordable(type)) {
                DiscoveryEvent evt = new DiscoveryEvent();

                evt.node(ctx.discovery().localNode());
                evt.eventNode(node);
                evt.type(type);

                // Daemon nodes are excluded from the recorded snapshot.
                evt.topologySnapshot(topVer, U.<ClusterNode, ClusterNode>arrayList(topSnapshot, daemonFilter));

                if (type == EVT_NODE_METRICS_UPDATED)
                    evt.message("Metrics were updated: " + node);

                else if (type == EVT_NODE_JOINED)
                    evt.message("Node joined: " + node);

                else if (type == EVT_NODE_LEFT)
                    evt.message("Node left: " + node);

                else if (type == EVT_NODE_FAILED)
                    evt.message("Node failed: " + node);

                else if (type == EVT_NODE_SEGMENTED)
                    evt.message("Node segmented: " + node);

                else
                    assert false;

                ctx.event().record(evt);
            }
        }

        /**
         * @param type Event type.
         * @param topVer Topology version.
         * @param node Node.
         * @param topSnapshot Topology snapshot.
         * @param data Custom event data (may be {@code null}).
         */
        void addEvent(
            int type,
            AffinityTopologyVersion topVer,
            ClusterNode node,
            Collection<ClusterNode> topSnapshot,
            @Nullable Serializable data
        ) {
            assert node != null;

            evts.add(F.t(type, topVer, node, topSnapshot, data));
        }

        /**
         * @param node Node to get a short description for.
         * @return Short description for the node to be used in 'quiet' mode.
         */
        private String quietNode(ClusterNode node) {
            assert node != null;

            return "nodeId8=" + node.id().toString().substring(0, 8) + ", " +
                "addrs=" + U.addressesAsString(node) + ", " +
                "order=" + node.order() + ", " +
                "CPUs=" + node.metrics().getTotalCpus();
        }

        /** {@inheritDoc} */
        @Override protected void body() throws InterruptedException {
            while (!isCancelled()) {
                try {
                    body0();
                }
                catch (InterruptedException e) {
                    throw e;
                }
                catch (Throwable t) {
                    // Keep the worker alive: a single bad event must not kill event processing.
                    U.error(log, "Unexpected exception in discovery worker thread (ignored).", t);
                }
            }
        }

        /**
         * Processes a single queued discovery event (blocks until one is available).
         *
         * @throws InterruptedException If interrupted.
         */
        @SuppressWarnings("DuplicateCondition")
        private void body0() throws InterruptedException {
            GridTuple5<Integer, AffinityTopologyVersion, ClusterNode, Collection<ClusterNode>, Serializable> evt =
                evts.take();

            int type = evt.get1();

            AffinityTopologyVersion topVer = evt.get2();

            ClusterNode node = evt.get3();

            boolean isDaemon = node.isDaemon();

            boolean segmented = false;

            switch (type) {
                case EVT_NODE_JOINED: {
                    assert !discoOrdered || topVer.topologyVersion() == node.order() : "Invalid topology version [topVer=" + topVer +
                        ", node=" + node + ']';

                    try {
                        checkAttributes(F.asList(node));
                    }
                    catch (IgniteCheckedException e) {
                        U.warn(log, e.getMessage()); // We a have well-formed attribute warning here.
                    }

                    if (!isDaemon) {
                        if (!isLocDaemon) {
                            if (log.isInfoEnabled())
                                log.info("Added new node to topology: " + node);

                            ackTopology(topVer.topologyVersion(), true);
                        }
                        else if (log.isDebugEnabled())
                            log.debug("Added new node to topology: " + node);
                    }
                    else if (log.isDebugEnabled())
                        log.debug("Added new daemon node to topology: " + node);

                    break;
                }

                case EVT_NODE_LEFT: {
                    // Check only if resolvers were configured.
                    if (hasRslvrs)
                        segChkWrk.scheduleSegmentCheck();

                    if (!isDaemon) {
                        if (!isLocDaemon) {
                            if (log.isInfoEnabled())
                                log.info("Node left topology: " + node);

                            ackTopology(topVer.topologyVersion(), true);
                        }
                        else if (log.isDebugEnabled())
                            log.debug("Node left topology: " + node);
                    }
                    else if (log.isDebugEnabled())
                        log.debug("Daemon node left topology: " + node);

                    break;
                }

                case EVT_NODE_FAILED: {
                    // Check only if resolvers were configured.
                    if (hasRslvrs)
                        segChkWrk.scheduleSegmentCheck();

                    if (!isDaemon) {
                        if (!isLocDaemon) {
                            U.warn(log, "Node FAILED: " + node);

                            ackTopology(topVer.topologyVersion(), true);
                        }
                        else if (log.isDebugEnabled())
                            log.debug("Node FAILED: " + node);
                    }
                    else if (log.isDebugEnabled())
                        log.debug("Daemon node FAILED: " + node);

                    break;
                }

                case EVT_NODE_SEGMENTED: {
                    // Segmentation events are only generated for the local node.
                    assert F.eqNodes(localNode(), node);

                    if (nodeSegFired) {
                        if (log.isDebugEnabled()) {
                            log.debug("Ignored node segmented event [type=EVT_NODE_SEGMENTED, " +
                                "node=" + node + ']');
                        }

                        return;
                    }

                    // Ignore all further EVT_NODE_SEGMENTED events
                    // until EVT_NODE_RECONNECTED is fired.
                    nodeSegFired = true;

                    lastLoggedTop.set(0);

                    segmented = true;

                    if (!isLocDaemon)
                        U.warn(log, "Local node SEGMENTED: " + node);
                    else if (log.isDebugEnabled())
                        log.debug("Local node SEGMENTED: " + node);

                    break;
                }

                case DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT: {
                    if (ctx.event().isRecordable(DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT)) {
                        DiscoveryCustomEvent customEvt = new DiscoveryCustomEvent();

                        customEvt.node(ctx.discovery().localNode());
                        customEvt.eventNode(node);
                        customEvt.type(type);
                        customEvt.topologySnapshot(topVer.topologyVersion(), null);
                        customEvt.affinityTopologyVersion(topVer);
                        customEvt.data(evt.get5());

                        ctx.event().record(customEvt);
                    }

                    // Custom events are recorded directly above, not via recordEvent() below.
                    return;
                }

                // Don't log metric update to avoid flooding the log.
                case EVT_NODE_METRICS_UPDATED:
                    break;

                default:
                    assert false : "Invalid discovery event: " + type;
            }

            recordEvent(type, topVer.topologyVersion(), node, evt.get4());

            if (segmented)
                onSegmentation();
        }

        /**
         * Applies the configured {@link SegmentationPolicy} after the local node got segmented.
         */
        private void onSegmentation() {
            SegmentationPolicy segPlc = ctx.config().getSegmentationPolicy();

            // Always disconnect first.
            try {
                getSpi().disconnect();
            }
            catch (IgniteSpiException e) {
                U.error(log, "Failed to disconnect discovery SPI.", e);
            }

            switch (segPlc) {
                case RESTART_JVM:
                    U.warn(log, "Restarting JVM according to configured segmentation policy.");

                    restartJvm();

                    break;

                case STOP:
                    U.warn(log, "Stopping local node according to configured segmentation policy.");

                    stopNode();

                    break;

                default:
                    assert segPlc == NOOP : "Unsupported segmentation policy value: " + segPlc;
            }
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(DiscoveryWorker.class, this);
        }
    }

    /**
     * Periodically samples GC and CPU load and publishes the values into the
     * {@code gcCpuLoad} and {@code cpuLoad} fields of the enclosing manager.
     */
    private class MetricsUpdater extends GridWorker {
        /** Previously sampled cumulative GC time, {@code -1} until first sample. */
        private long prevGcTime = -1;

        /** Previously sampled cumulative process CPU time, {@code -1} until first sample. */
        private long prevCpuTime = -1;

        /**
         * Creates metrics updater worker.
         */
        private MetricsUpdater() {
            super(ctx.gridName(), "metrics-updater", GridDiscoveryManager.this.log);
        }

        /** {@inheritDoc} */
        @Override protected void body() throws IgniteInterruptedCheckedException {
            while (!isCancelled()) {
                U.sleep(METRICS_UPDATE_FREQ);

                gcCpuLoad = getGcCpuLoad();
                cpuLoad = getCpuLoad();
            }
        }

        /**
         * @return GC CPU load.
         */
        private double getGcCpuLoad() {
            long gcTime = 0;

            for (GarbageCollectorMXBean bean : gc) {
                long colTime = bean.getCollectionTime();

                // getCollectionTime() may return -1 if undefined for a collector.
                if (colTime > 0)
                    gcTime += colTime;
            }

            gcTime /= metrics.getAvailableProcessors();

            double gc = 0;

            if (prevGcTime > 0) {
                long gcTimeDiff = gcTime - prevGcTime;

                gc = (double)gcTimeDiff / METRICS_UPDATE_FREQ;
            }

            prevGcTime = gcTime;

            return gc;
        }

        /**
         * @return CPU load, or {@code -1} if the platform MXBean does not expose process CPU time.
         */
        private double getCpuLoad() {
            long cpuTime;

            try {
                cpuTime = U.<Long>property(os, "processCpuTime");
            }
            catch (IgniteException ignored) {
                return -1;
            }

            // Method reports time in nanoseconds across all processors.
            cpuTime /= 1000000 * metrics.getAvailableProcessors();

            double cpu = 0;

            if (prevCpuTime > 0) {
                long cpuTimeDiff = cpuTime - prevCpuTime;

                // CPU load could go higher than 100% because calculating of cpuTimeDiff also takes some time.
                cpu = Math.min(1.0, (double)cpuTimeDiff / METRICS_UPDATE_FREQ);
            }

            prevCpuTime = cpuTime;

            return cpu;
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(MetricsUpdater.class, this, super.toString());
        }
    }

    /** Discovery topology future. Completes when topology version reaches the awaited one. */
    private static class DiscoTopologyFuture extends GridFutureAdapter<Long> implements GridLocalEventListener {
        /** */
        private static final long serialVersionUID = 0L;

        /** */
        private GridKernalContext ctx;

        /** Topology await version. */
        private long awaitVer;

        /** Empty constructor required by {@link Externalizable}. */
        private DiscoTopologyFuture() {
            // No-op.
        }

        /**
         * @param ctx Context.
         * @param awaitVer Await version.
         */
        private DiscoTopologyFuture(GridKernalContext ctx, long awaitVer) {
            this.ctx = ctx;

            this.awaitVer = awaitVer;
        }

        /** Initializes future. */
        private void init() {
            ctx.event().addLocalEventListener(this, EVT_NODE_JOINED, EVT_NODE_LEFT, EVT_NODE_FAILED);

            // Close potential window: topology may have advanced before the listener was added.
            long topVer = ctx.discovery().topologyVersion();

            if (topVer >= awaitVer)
                onDone(topVer);
        }

        /** {@inheritDoc} */
        @Override public boolean onDone(@Nullable Long res, @Nullable Throwable err) {
            if (super.onDone(res, err)) {
                // Stop listening once completed.
                ctx.event().removeLocalEventListener(this, EVT_NODE_JOINED, EVT_NODE_LEFT, EVT_NODE_FAILED);

                return true;
            }

            return false;
        }

        /** {@inheritDoc} */
        @Override public void onEvent(Event evt) {
            assert evt.type() == EVT_NODE_JOINED || evt.type() == EVT_NODE_LEFT || evt.type() == EVT_NODE_FAILED;

            DiscoveryEvent discoEvt = (DiscoveryEvent)evt;

            if (discoEvt.topologyVersion() >= awaitVer)
                onDone(discoEvt.topologyVersion());
        }
    }

    /**
     * Immutable pair of topology version and its discovery cache.
     */
    private static class Snapshot {
        /** Topology version. */
        private final AffinityTopologyVersion topVer;

        /** Discovery cache for this version. */
        private final DiscoCache discoCache;

        /**
         * @param topVer Topology version.
         * @param discoCache Disco cache.
         */
        private Snapshot(AffinityTopologyVersion topVer, DiscoCache discoCache) {
            this.topVer = topVer;

            this.discoCache = discoCache;
        }
    }

    /**
     * Cache for discovery collections.
     */
    private class DiscoCache {
        /** Remote nodes. */
        private final List<ClusterNode> rmtNodes;

        /** All nodes. */
        private final List<ClusterNode> allNodes;

        /** All nodes with at least one cache configured. */
        @GridToStringInclude
        private final Collection<ClusterNode> allNodesWithCaches;

        /** All remote nodes with at least one cache configured. */
        @GridToStringInclude
        private final Collection<ClusterNode> rmtNodesWithCaches;

        /** Cache nodes by cache name. */
        @GridToStringInclude
        private final Map<String, Collection<ClusterNode>> allCacheNodes;

        /** Remote cache nodes by cache name. */
        @GridToStringInclude
        private final Map<String, Collection<ClusterNode>> rmtCacheNodes;

        /** Affinity (data) cache nodes by cache name. */
        @GridToStringInclude
        private final Map<String, Collection<ClusterNode>> affCacheNodes;

        /** Caches where at least one node has near cache enabled. */
        @GridToStringInclude
        private final Set<String> nearEnabledCaches;

        /** Nodes grouped by version. */
        private final NavigableMap<IgniteProductVersion, Collection<ClusterNode>> nodesByVer;

        /** Daemon nodes. */
        private final List<ClusterNode> daemonNodes;

        /** Node map. */
        private final Map<UUID, ClusterNode> nodeMap;

        /** Local node. */
        private final ClusterNode loc;

        /** Highest node order. */
        private final long maxOrder;

        /**
         * Cached alive nodes list. As long as this collection doesn't accept {@code null}s use {@link
         * #maskNull(String)} before passing raw cache names to it.
         */
        private final ConcurrentMap<String, Collection<ClusterNode>> aliveCacheNodes;

        /**
         * Cached alive remote nodes list. As long as this collection doesn't accept {@code null}s use {@link
         * #maskNull(String)} before passing raw cache names to it.
         */
        private final ConcurrentMap<String, Collection<ClusterNode>> aliveRmtCacheNodes;

        /**
         * Cached alive nodes with caches.
         */
        private final Collection<ClusterNode> aliveNodesWithCaches;

        /**
         * Cached alive remote nodes with caches.
         */
        private final Collection<ClusterNode> aliveRmtNodesWithCaches;

        /**
         * @param loc Local node.
         * @param rmts Remote nodes.
         */
        private DiscoCache(ClusterNode loc, Collection<ClusterNode> rmts) {
            this.loc = loc;

            rmtNodes = Collections.unmodifiableList(new ArrayList<>(F.view(rmts, daemonFilter)));

            assert !rmtNodes.contains(loc) : "Remote nodes collection shouldn't contain local node" +
                " [rmtNodes=" + rmtNodes + ", loc=" + loc + ']';

            List<ClusterNode> all = new ArrayList<>(rmtNodes.size() + 1);

            if (!loc.isDaemon())
                all.add(loc);

            all.addAll(rmtNodes);

            allNodes = Collections.unmodifiableList(all);

            Map<String, Collection<ClusterNode>> cacheMap = new HashMap<>(allNodes.size(), 1.0f);
            Map<String, Collection<ClusterNode>> rmtCacheMap = new HashMap<>(allNodes.size(), 1.0f);
            Map<String, Collection<ClusterNode>> dhtNodesMap = new HashMap<>(allNodes.size(), 1.0f);
            Collection<ClusterNode> nodesWithCaches = new HashSet<>(allNodes.size());
            Collection<ClusterNode> rmtNodesWithCaches = new HashSet<>(allNodes.size());

            aliveCacheNodes = new ConcurrentHashMap8<>(allNodes.size(), 1.0f);
            aliveRmtCacheNodes = new ConcurrentHashMap8<>(allNodes.size(), 1.0f);
            aliveNodesWithCaches = new ConcurrentSkipListSet<>();
            aliveRmtNodesWithCaches = new ConcurrentSkipListSet<>();
            nodesByVer = new TreeMap<>();

            long maxOrder0 = 0;

            Set<String> nearEnabledSet = new HashSet<>();

            for (ClusterNode node : allNodes) {
                assert node.order() != 0 : "Invalid node order [locNode=" + loc + ", node=" + node + ']';

                if (node.order() > maxOrder0)
                    maxOrder0 = node.order();

                boolean hasCaches = false;

                for (Map.Entry<String, CachePredicate> entry : registeredCaches.entrySet()) {
                    String cacheName = entry.getKey();

                    CachePredicate filter = entry.getValue();

                    if (filter.cacheNode(node)) {
                        nodesWithCaches.add(node);

                        if (!loc.id().equals(node.id()))
                            rmtNodesWithCaches.add(node);

                        addToMap(cacheMap, cacheName, node);

                        if (alive(node.id()))
                            addToMap(aliveCacheNodes, maskNull(cacheName), node);

                        if (filter.dataNode(node))
                            addToMap(dhtNodesMap, cacheName, node);

                        if (filter.nearNode(node))
                            nearEnabledSet.add(cacheName);

                        if (!loc.id().equals(node.id())) {
                            addToMap(rmtCacheMap, cacheName, node);

                            if (alive(node.id()))
                                addToMap(aliveRmtCacheNodes, maskNull(cacheName), node);
                        }

                        hasCaches = true;
                    }
                }

                if (hasCaches) {
                    if (alive(node.id())) {
                        aliveNodesWithCaches.add(node);

                        if (!loc.id().equals(node.id()))
                            aliveRmtNodesWithCaches.add(node);
                    }
                }

                IgniteProductVersion nodeVer = U.productVersion(node);

                // Create collection for this version if it does not exist.
                Collection<ClusterNode> nodes = nodesByVer.get(nodeVer);

                if (nodes == null) {
                    nodes = new ArrayList<>(allNodes.size());

                    nodesByVer.put(nodeVer, nodes);
                }

                nodes.add(node);
            }

            // Need second iteration to add this node to all previous node versions.
            for (ClusterNode node : allNodes) {
                IgniteProductVersion nodeVer = U.productVersion(node);

                // Get all versions lower or equal node's version.
                NavigableMap<IgniteProductVersion, Collection<ClusterNode>> updateView =
                    nodesByVer.headMap(nodeVer, false);

                for (Collection<ClusterNode> prevVersions : updateView.values())
                    prevVersions.add(node);
            }

            maxOrder = maxOrder0;

            allCacheNodes = Collections.unmodifiableMap(cacheMap);
            rmtCacheNodes = Collections.unmodifiableMap(rmtCacheMap);
            affCacheNodes = Collections.unmodifiableMap(dhtNodesMap);
            allNodesWithCaches = Collections.unmodifiableCollection(nodesWithCaches);
            this.rmtNodesWithCaches = Collections.unmodifiableCollection(rmtNodesWithCaches);
            nearEnabledCaches = Collections.unmodifiableSet(nearEnabledSet);

            daemonNodes = Collections.unmodifiableList(new ArrayList<>(
                F.view(F.concat(false, loc, rmts), F0.not(daemonFilter))));

            Map<UUID, ClusterNode> nodeMap = new HashMap<>(allNodes().size() + daemonNodes.size(), 1.0f);

            for (ClusterNode n : F.concat(false, allNodes(), daemonNodes()))
                nodeMap.put(n.id(), n);

            this.nodeMap = nodeMap;
        }

        /**
         * Adds node to map.
         *
         * @param cacheMap Map to add to.
         * @param cacheName Cache name.
         * @param rich Node to add
         */
        private void addToMap(Map<String, Collection<ClusterNode>> cacheMap, String cacheName, ClusterNode rich) {
            Collection<ClusterNode> cacheNodes = cacheMap.get(cacheName);

            if (cacheNodes == null) {
                cacheNodes = new ArrayList<>(allNodes.size());

                cacheMap.put(cacheName, cacheNodes);
            }

            cacheNodes.add(rich);
        }

        /** @return Local node. */
        ClusterNode localNode() {
            return loc;
        }

        /** @return Remote nodes. */
        Collection<ClusterNode> remoteNodes() {
            return rmtNodes;
        }

        /** @return All nodes. */
        Collection<ClusterNode> allNodes() {
            return allNodes;
        }

        /**
         * @return All nodes with at least one cache configured.
         */
        Collection<ClusterNode> allNodesWithCaches() {
            return allNodesWithCaches;
        }

        /**
         * Gets collection of nodes which have version equal or greater than {@code ver}.
         *
         * @param ver Version to check.
         * @return Collection of nodes with version equal or greater than {@code ver}.
         */
        Collection<ClusterNode> elderNodes(IgniteProductVersion ver) {
            Map.Entry<IgniteProductVersion, Collection<ClusterNode>> entry = nodesByVer.ceilingEntry(ver);

            if (entry == null)
                return Collections.emptyList();

            return entry.getValue();
        }

        /**
         * @return Versions map.
         */
        NavigableMap<IgniteProductVersion, Collection<ClusterNode>> versionsMap() {
            return nodesByVer;
        }

        /**
         * Gets collection of nodes with at least one cache configured.
         *
         * @param topVer Topology version (maximum allowed node order).
         * @return Collection of nodes.
         */
        Collection<ClusterNode> allNodesWithCaches(final long topVer) {
            return filter(topVer, allNodesWithCaches);
        }

        /**
         * Gets all nodes that have cache with given name.
         *
         * @param cacheName Cache name.
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> cacheNodes(@Nullable String cacheName, final long topVer) {
            return filter(topVer, allCacheNodes.get(cacheName));
        }

        /**
         * Gets all remote nodes that have cache with given name.
         *
         * @param cacheName Cache name.
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> remoteCacheNodes(@Nullable String cacheName, final long topVer) {
            return filter(topVer, rmtCacheNodes.get(cacheName));
        }

        /**
         * Gets all remote nodes that have at least one cache configured.
         *
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> remoteCacheNodes(final long topVer) {
            return filter(topVer, rmtNodesWithCaches);
        }

        /**
         * Gets all nodes that have cache with given name and should participate in affinity calculation. With
         * partitioned cache nodes with near-only cache do not participate in affinity node calculation.
         *
         * @param cacheName Cache name.
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> cacheAffinityNodes(@Nullable String cacheName, final long topVer) {
            return filter(topVer, affCacheNodes.get(cacheName));
        }

        /**
         * Gets all alive nodes that have cache with given name.
         *
         * @param cacheName Cache name.
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> aliveCacheNodes(@Nullable String cacheName, final long topVer) {
            return filter(topVer, aliveCacheNodes.get(maskNull(cacheName)));
        }

        /**
         * Gets all alive remote nodes that have cache with given name.
         *
         * @param cacheName Cache name.
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> aliveRemoteCacheNodes(@Nullable String cacheName, final long topVer) {
            return filter(topVer, aliveRmtCacheNodes.get(maskNull(cacheName)));
        }

        /**
         * Gets all alive remote nodes with at least one cache configured.
         *
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> aliveRemoteNodesWithCaches(final long topVer) {
            return filter(topVer, aliveRmtNodesWithCaches);
        }

        /**
         * Gets all alive nodes (local included) with at least one cache configured.
         *
         * @param topVer Topology version.
         * @return Collection of nodes.
         */
        Collection<ClusterNode> aliveNodesWithCaches(final long topVer) {
            return filter(topVer, aliveNodesWithCaches);
        }

        /**
         * Checks if cache with given name has at least one node with near cache enabled.
         *
         * @param cacheName Cache name.
         * @return {@code True} if cache with given name has at least one node with near cache enabled.
         */
        boolean hasNearCache(@Nullable String cacheName) {
            return nearEnabledCaches.contains(cacheName);
        }

        /**
         * Removes left node from cached alives lists.
         *
         * @param leftNode Left node.
         */
        void updateAlives(ClusterNode leftNode) {
            // Node from a newer topology than this cache covers: nothing to remove.
            if (leftNode.order() > maxOrder)
                return;

            filterNodeMap(aliveCacheNodes, leftNode);

            filterNodeMap(aliveRmtCacheNodes, leftNode);

            aliveNodesWithCaches.remove(leftNode);
            aliveRmtNodesWithCaches.remove(leftNode);
        }

        /**
         * Creates a copy of nodes map without the given node.
         *
         * @param map Map to copy.
         * @param exclNode Node to exclude.
         */
        private void filterNodeMap(ConcurrentMap<String, Collection<ClusterNode>> map, final ClusterNode exclNode) {
            for (String cacheName : registeredCaches.keySet()) {
                String maskedName = maskNull(cacheName);

                // CAS-style retry loop: copy-on-write replace until it sticks or node is absent.
                while (true) {
                    Collection<ClusterNode> oldNodes = map.get(maskedName);

                    if (oldNodes == null || oldNodes.isEmpty())
                        break;

                    Collection<ClusterNode> newNodes = new ArrayList<>(oldNodes);

                    if (!newNodes.remove(exclNode))
                        break;

                    if (map.replace(maskedName, oldNodes, newNodes))
                        break;
                }
            }
        }

        /**
         * Replaces {@code null} with {@code NULL_CACHE_NAME}.
         *
         * @param cacheName Cache name.
         * @return Masked name.
         */
        private String maskNull(@Nullable String cacheName) {
            return cacheName == null ? NULL_CACHE_NAME : cacheName;
        }

        /**
         * @param topVer Topology version.
         * @param nodes Nodes.
         * @return Filtered collection (potentially empty, but never {@code null}).
         */
        private Collection<ClusterNode> filter(final long topVer, @Nullable Collection<ClusterNode> nodes) {
            if (nodes == null)
                return Collections.emptyList();

            // If no filtering needed, return original collection.
            return nodes.isEmpty() || topVer < 0 || topVer >= maxOrder ?
                nodes :
                F.view(nodes, new P1<ClusterNode>() {
                    @Override public boolean apply(ClusterNode node) {
                        return node.order() <= topVer;
                    }
                });
        }

        /** @return Daemon nodes. */
        Collection<ClusterNode> daemonNodes() {
            return daemonNodes;
        }

        /**
         * @param id Node ID.
         * @return Node.
         */
        @Nullable ClusterNode node(UUID id) {
            return nodeMap.get(id);
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(DiscoCache.class, this, "allNodesWithDaemons", U.toShortString(allNodes));
        }
    }

    /**
     * Cache predicate.
     */
    private static class CachePredicate {
        /** Cache filter. */
        private IgnitePredicate<ClusterNode> cacheFilter;

        /** If near cache is enabled on data nodes. */
        private boolean nearEnabled;

        /** Flag indicating if cache is local. */
        private boolean loc;

        /** Collection of client near nodes (node ID -> near cache enabled flag). */
        private Map<UUID, Boolean> clientNodes;

        /**
         * @param cacheFilter Cache filter.
         * @param nearEnabled Near enabled flag.
         * @param loc {@code True} if cache is local.
         */
        private CachePredicate(IgnitePredicate<ClusterNode> cacheFilter, boolean nearEnabled, boolean loc) {
            assert cacheFilter != null;

            this.cacheFilter = cacheFilter;
            this.nearEnabled = nearEnabled;
            this.loc = loc;

            clientNodes = new ConcurrentHashMap<>();
        }

        /**
         * @param nodeId Near node ID to add.
         * @param nearEnabled {@code True} if the client node has near cache enabled.
         */
        public void addClientNode(UUID nodeId, boolean nearEnabled) {
            clientNodes.put(nodeId, nearEnabled);
        }

        /**
         * @param leftNodeId Left node ID.
         */
        public void onNodeLeft(UUID leftNodeId) {
            clientNodes.remove(leftNodeId);
        }

        /**
         * @param node Node to check.
         * @return {@code True} if this node is a data node for given cache.
         */
        public boolean dataNode(ClusterNode node) {
            return !node.isDaemon() && cacheFilter.apply(node);
        }

        /**
         * @param node Node to check.
         * @return {@code True} if cache is accessible on the given node.
         */
        public boolean cacheNode(ClusterNode node) {
            return !node.isClient() && !node.isDaemon() &&
                (cacheFilter.apply(node) || clientNodes.containsKey(node.id()));
        }

        /**
         * @param node Node to check.
         * @return {@code True} if near cache is present on the given nodes.
         */
        public boolean nearNode(ClusterNode node) {
            if (node.isDaemon())
                return false;

            if (nearEnabled && cacheFilter.apply(node))
                return true;

            Boolean near = clientNodes.get(node.id());

            return near != null && near;
        }

        /**
         * @param node Node to check.
         * @return {@code True} if client cache is present on the given nodes.
         */
        public boolean clientNode(ClusterNode node) {
            if (node.isDaemon())
                return false;

            Boolean near = clientNodes.get(node.id());

            // Registered as client but without near cache.
            return near != null && !near;
        }
    }
}
modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.managers.discovery; import org.apache.ignite.*; import org.apache.ignite.cache.*; import org.apache.ignite.cluster.*; import org.apache.ignite.events.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.events.*; import org.apache.ignite.internal.managers.*; import org.apache.ignite.internal.managers.communication.*; import org.apache.ignite.internal.managers.eventstorage.*; import org.apache.ignite.internal.processors.affinity.*; import org.apache.ignite.internal.processors.cache.*; import org.apache.ignite.internal.processors.jobmetrics.*; import org.apache.ignite.internal.processors.security.*; import org.apache.ignite.internal.util.*; import org.apache.ignite.internal.util.future.*; import org.apache.ignite.internal.util.lang.*; import org.apache.ignite.internal.util.tostring.*; import org.apache.ignite.internal.util.typedef.*; import org.apache.ignite.internal.util.typedef.internal.*; import org.apache.ignite.internal.util.worker.*; import org.apache.ignite.lang.*; import org.apache.ignite.plugin.security.*; import org.apache.ignite.plugin.segmentation.*; import org.apache.ignite.spi.*; import org.apache.ignite.spi.discovery.*; import 
org.apache.ignite.thread.*;
import org.jetbrains.annotations.*;
import org.jsr166.*;

import java.io.*;
import java.lang.management.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import java.util.zip.*;

import static java.util.concurrent.TimeUnit.*;
import static org.apache.ignite.events.EventType.*;
import static org.apache.ignite.internal.IgniteNodeAttributes.*;
import static org.apache.ignite.internal.IgniteVersionUtils.*;
import static org.apache.ignite.plugin.segmentation.SegmentationPolicy.*;

/**
 * Discovery SPI manager.
 */
public class GridDiscoveryManager extends GridManagerAdapter<DiscoverySpi> {
    /** Fake key for {@code null}-named caches. Used inside {@link DiscoCache}. */
    private static final String NULL_CACHE_NAME = UUID.randomUUID().toString();

    /** Metrics update frequency. */
    private static final long METRICS_UPDATE_FREQ = 3000;

    /** Heap memory MX bean. */
    private static final MemoryMXBean mem = ManagementFactory.getMemoryMXBean();

    /** OS MX bean (used for CPU load and physical memory). */
    private static final OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean();

    /** Runtime MX bean. */
    private static final RuntimeMXBean rt = ManagementFactory.getRuntimeMXBean();

    /** Threads MX bean. */
    private static final ThreadMXBean threads = ManagementFactory.getThreadMXBean();

    /** GC MX beans. */
    private static final Collection<GarbageCollectorMXBean> gc = ManagementFactory.getGarbageCollectorMXBeans();

    /** Topology log message prefix. */
    private static final String PREFIX = "Topology snapshot";

    /** Discovery cached history size. */
    protected static final int DISCOVERY_HISTORY_SIZE = 100;

    /** Predicate filtering out daemon nodes. */
    private static final IgnitePredicate<ClusterNode> daemonFilter = new P1<ClusterNode>() {
        @Override public boolean apply(ClusterNode n) {
            return !n.isDaemon();
        }
    };

    /** Disco history entries comparator. */
    private static final Comparator<Map.Entry<AffinityTopologyVersion, DiscoCache>> histCmp =
        new Comparator<Map.Entry<AffinityTopologyVersion, DiscoCache>>() {
            @Override public int compare(Map.Entry<AffinityTopologyVersion, DiscoCache> o1,
                Map.Entry<AffinityTopologyVersion, DiscoCache> o2) {
                return o1.getKey().compareTo(o2.getKey());
            }
        };

    /** Discovery event worker. */
    private final DiscoveryWorker discoWrk = new DiscoveryWorker();

    /** Network segment check worker. */
    private SegmentCheckWorker segChkWrk;

    /** Network segment check thread. */
    private IgniteThread segChkThread;

    /** Last logged topology. */
    private final AtomicLong lastLoggedTop = new AtomicLong();

    /** Local node. */
    private ClusterNode locNode;

    /** Local node daemon flag. */
    private boolean isLocDaemon;

    /** {@code True} if resolvers were configured and network segment check is enabled. */
    private boolean hasRslvrs;

    /** Last segment check result. */
    private final AtomicBoolean lastSegChkRes = new AtomicBoolean(true);

    /** Topology cache history. */
    private final Map<AffinityTopologyVersion, DiscoCache> discoCacheHist =
        new GridBoundedConcurrentLinkedHashMap<>(DISCOVERY_HISTORY_SIZE, DISCOVERY_HISTORY_SIZE, 0.7f, 1);

    /** Topology snapshots history. */
    private volatile Map<Long, Collection<ClusterNode>> topHist = new HashMap<>();

    /** Topology version. */
    private final AtomicReference<Snapshot> topSnap =
        new AtomicReference<>(new Snapshot(AffinityTopologyVersion.ZERO, null));

    /** Minor topology version. */
    private int minorTopVer;

    /** Order supported flag. */
    private boolean discoOrdered;

    /** Topology snapshots history supported flag. */
    private boolean histSupported;

    /** Configured network segment check frequency. */
    private long segChkFreq;

    /** Local node join to topology event. */
    private GridFutureAdapter<DiscoveryEvent> locJoinEvt = new GridFutureAdapter<>();

    /** GC CPU load. */
    private volatile double gcCpuLoad;

    /** CPU load. */
    private volatile double cpuLoad;

    /** Metrics. */
    private final GridLocalMetrics metrics = createMetrics();

    /** Metrics update worker. */
    private final MetricsUpdater metricsUpdater = new MetricsUpdater();

    /** Custom event listener. */
    private GridPlainInClosure<Serializable> customEvtLsnr;

    /**
     * Map of dynamic cache filters.
     * NOTE(review): plain HashMap mutated by public setCacheFilter()/removeCacheFilter() while it is
     * read during DiscoCache construction on discovery threads — looks racy; confirm external
     * synchronization or consider a concurrent map.
     */
    private Map<String, CachePredicate> registeredCaches = new HashMap<>();

    /** @param ctx Context. */
    public GridDiscoveryManager(GridKernalContext ctx) {
        super(ctx, ctx.config().getDiscoverySpi());
    }

    /**
     * @return Memory usage of non-heap memory.
     */
    private MemoryUsage nonHeapMemoryUsage() {
        // Workaround of exception in WebSphere.
        // We received the following exception:
        // java.lang.IllegalArgumentException: used value cannot be larger than the committed value
        // at java.lang.management.MemoryUsage.<init>(MemoryUsage.java:105)
        // at com.ibm.lang.management.MemoryMXBeanImpl.getNonHeapMemoryUsageImpl(Native Method)
        // at com.ibm.lang.management.MemoryMXBeanImpl.getNonHeapMemoryUsage(MemoryMXBeanImpl.java:143)
        // at org.apache.ignite.spi.metrics.jdk.GridJdkLocalMetricsSpi.getMetrics(GridJdkLocalMetricsSpi.java:242)
        //
        // We so had to workaround this with exception handling, because we can not control classes from WebSphere.
        try {
            return mem.getNonHeapMemoryUsage();
        }
        catch (IllegalArgumentException ignored) {
            return new MemoryUsage(0, 0, 0, 0);
        }
    }

    /** {@inheritDoc} */
    @Override public void onBeforeSpiStart() {
        DiscoverySpi spi = getSpi();

        spi.setNodeAttributes(ctx.nodeAttributes(), VER);
    }

    /**
     * Adds dynamic cache filter.
     *
     * @param cacheName Cache name.
     * @param filter Cache filter.
     * @param nearEnabled {@code True} if near cache is enabled on data nodes.
     * @param loc {@code True} if cache is local.
     */
    public void setCacheFilter(
        String cacheName,
        IgnitePredicate<ClusterNode> filter,
        boolean nearEnabled,
        boolean loc
    ) {
        if (!registeredCaches.containsKey(cacheName))
            registeredCaches.put(cacheName, new CachePredicate(filter, nearEnabled, loc));
    }

    /**
     * Removes dynamic cache filter.
     *
     * @param cacheName Cache name.
     */
    public void removeCacheFilter(String cacheName) {
        registeredCaches.remove(cacheName);
    }

    /**
     * Adds near node ID to cache filter.
     *
     * @param cacheName Cache name.
     * @param clientNodeId Near node ID.
     * @param nearEnabled {@code True} if the client node has near cache enabled.
     */
    public void addClientNode(String cacheName, UUID clientNodeId, boolean nearEnabled) {
        CachePredicate predicate = registeredCaches.get(cacheName);

        if (predicate != null)
            predicate.addClientNode(clientNodeId, nearEnabled);
    }

    /**
     * @return Client nodes map ({@code null} if no cache has registered client nodes).
     */
    public Map<String, Map<UUID, Boolean>> clientNodesMap() {
        Map<String, Map<UUID, Boolean>> res = null;

        for (Map.Entry<String, CachePredicate> entry : registeredCaches.entrySet()) {
            CachePredicate pred = entry.getValue();

            if (!F.isEmpty(pred.clientNodes)) {
                if (res == null)
                    res = U.newHashMap(registeredCaches.size());

                res.put(entry.getKey(), new HashMap<>(pred.clientNodes));
            }
        }

        return res;
    }

    /**
     * @param leftNodeId Left node ID.
     */
    private void updateClientNodes(UUID leftNodeId) {
        for (Map.Entry<String, CachePredicate> entry : registeredCaches.entrySet()) {
            CachePredicate pred = entry.getValue();

            pred.onNodeLeft(leftNodeId);
        }
    }

    /**
     * Custom events bump the minor version; all non-metrics events reset it.
     *
     * @param evtType Event type.
     * @param topVer Topology version reported by the SPI.
     * @return Next affinity topology version.
     */
    private AffinityTopologyVersion nextTopologyVersion(int evtType, long topVer) {
        if (evtType == DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT)
            minorTopVer++;
        else if (evtType != EVT_NODE_METRICS_UPDATED)
            minorTopVer = 0;

        return new AffinityTopologyVersion(topVer, minorTopVer);
    }

    /** {@inheritDoc} */
    @Override public void start() throws IgniteCheckedException {
        long totSysMemory = -1;

        try {
            totSysMemory = U.<Long>property(os, "totalPhysicalMemorySize");
        }
        catch (RuntimeException ignored) {
            // No-op.
        }

        ctx.addNodeAttribute(IgniteNodeAttributes.ATTR_PHY_RAM, totSysMemory);

        DiscoverySpi spi = getSpi();

        discoOrdered = discoOrdered();

        histSupported = historySupported();

        isLocDaemon = ctx.isDaemon();

        hasRslvrs = !F.isEmpty(ctx.config().getSegmentationResolvers());

        segChkFreq = ctx.config().getSegmentCheckFrequency();

        if (hasRslvrs) {
            if (segChkFreq < 0)
                throw new IgniteCheckedException("Segment check frequency cannot be negative: " + segChkFreq);

            if (segChkFreq > 0 && segChkFreq < 2000)
                U.warn(log, "Configuration parameter 'segmentCheckFrequency' is too low " +
                    "(at least 2000 ms recommended): " + segChkFreq);

            checkSegmentOnStart();
        }

        new IgniteThread(metricsUpdater).start();

        spi.setMetricsProvider(createMetricsProvider());

        if (ctx.security().enabled()) {
            spi.setAuthenticator(new DiscoverySpiNodeAuthenticator() {
                @Override public SecurityContext authenticateNode(ClusterNode node, SecurityCredentials cred) {
                    try {
                        return ctx.security().authenticateNode(node, cred);
                    }
                    catch (IgniteCheckedException e) {
                        throw U.convertException(e);
                    }
                }

                @Override public boolean isGlobalNodeAuthentication() {
                    return ctx.security().isGlobalNodeAuthentication();
                }
            });
        }

        spi.setListener(new DiscoverySpiListener() {
            @Override public void onDiscovery(
                int type,
                long topVer,
                ClusterNode node,
                Collection<ClusterNode> topSnapshot,
                Map<Long, Collection<ClusterNode>> snapshots,
                @Nullable Serializable data
            ) {
                final ClusterNode locNode = localNode();

                if (snapshots != null)
                    topHist = snapshots;

                AffinityTopologyVersion nextTopVer = nextTopologyVersion(type, topVer);

                if (type == EVT_NODE_FAILED || type == EVT_NODE_LEFT) {
                    // Drop the departed node from all cached alive collections.
                    for (DiscoCache c : discoCacheHist.values())
                        c.updateAlives(node);

                    updateClientNodes(node.id());
                }

                if (type == DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT) {
                    try {
                        if (customEvtLsnr != null)
                            customEvtLsnr.apply(data);
                    }
                    catch (Exception e) {
                        U.error(log, "Failed to notify direct custom event listener: " + data, e);
                    }
                }

                // Put topology snapshot into discovery history.
                // There is no race possible between history maintenance and concurrent discovery
                // event notifications, since SPI notifies manager about all events from this listener.
                if (type != EVT_NODE_METRICS_UPDATED) {
                    DiscoCache cache = new DiscoCache(locNode, F.view(topSnapshot, F.remoteNodes(locNode.id())));

                    discoCacheHist.put(nextTopVer, cache);

                    boolean set = updateTopologyVersionIfGreater(nextTopVer, cache);

                    assert set || topVer == 0 : "Topology version has not been updated [this.topVer=" +
                        topSnap + ", topVer=" + topVer + ", node=" + node +
                        ", evt=" + U.gridEventName(type) + ']';
                }

                // If this is a local join event, just save it and do not notify listeners.
                if (type == EVT_NODE_JOINED && node.id().equals(locNode.id())) {
                    DiscoveryEvent discoEvt = new DiscoveryEvent();

                    discoEvt.node(ctx.discovery().localNode());
                    discoEvt.eventNode(node);
                    discoEvt.type(EVT_NODE_JOINED);

                    discoEvt.topologySnapshot(topVer, new ArrayList<>(
                        F.viewReadOnly(topSnapshot, new C1<ClusterNode, ClusterNode>() {
                            @Override public ClusterNode apply(ClusterNode e) {
                                return e;
                            }
                        }, daemonFilter)));

                    locJoinEvt.onDone(discoEvt);

                    return;
                }

                discoWrk.addEvent(type, nextTopVer, node, topSnapshot, data);
            }
        });

        spi.setDataExchange(new DiscoverySpiDataExchange() {
            @Override public Map<Integer, Serializable> collect(UUID nodeId) {
                assert nodeId != null;

                Map<Integer, Serializable> data = new HashMap<>();

                // Discovery data is keyed by component discovery data type ordinal.
                for (GridComponent comp : ctx.components()) {
                    Serializable compData = comp.collectDiscoveryData(nodeId);

                    if (compData != null) {
                        assert comp.discoveryDataType() != null;

                        data.put(comp.discoveryDataType().ordinal(), compData);
                    }
                }

                return data;
            }

            @Override public void onExchange(UUID joiningNodeId, UUID nodeId, Map<Integer, Serializable> data) {
                for (Map.Entry<Integer, Serializable> e : data.entrySet()) {
                    GridComponent comp = null;

                    for (GridComponent c : ctx.components()) {
                        if (c.discoveryDataType() != null && c.discoveryDataType().ordinal() == e.getKey()) {
                            comp = c;

                            break;
                        }
                    }

                    if (comp != null)
                        comp.onDiscoveryDataReceived(joiningNodeId, nodeId, e.getValue());
                    else
                        U.warn(log, "Received discovery data for unknown component: " + e.getKey());
                }
            }
        });

        startSpi();

        // Start segment check worker only if frequency is greater than 0.
        if (hasRslvrs && segChkFreq > 0) {
            segChkWrk = new SegmentCheckWorker();

            segChkThread = new IgniteThread(segChkWrk);

            segChkThread.start();
        }

        locNode = spi.getLocalNode();

        updateTopologyVersionIfGreater(new AffinityTopologyVersion(locNode.order()),
            new DiscoCache(localNode(), getSpi().getRemoteNodes()));

        checkAttributes(discoCache().remoteNodes());

        // Start discovery worker.
        new IgniteThread(discoWrk).start();

        if (log.isDebugEnabled())
            log.debug(startInfo());
    }

    /**
     * @param customEvtLsnr Custom event listener.
     */
    public void setCustomEventListener(GridPlainInClosure<Serializable> customEvtLsnr) {
        this.customEvtLsnr = customEvtLsnr;
    }

    /**
     * @return Metrics.
     */
    private GridLocalMetrics createMetrics() {
        return new GridLocalMetrics() {
            @Override public int getAvailableProcessors() {
                return os.getAvailableProcessors();
            }

            @Override public double getCurrentCpuLoad() {
                return cpuLoad;
            }

            @Override public double getCurrentGcCpuLoad() {
                return gcCpuLoad;
            }

            @Override public long getHeapMemoryInitialized() {
                return mem.getHeapMemoryUsage().getInit();
            }

            @Override public long getHeapMemoryUsed() {
                return mem.getHeapMemoryUsage().getUsed();
            }

            @Override public long getHeapMemoryCommitted() {
                return mem.getHeapMemoryUsage().getCommitted();
            }

            @Override public long getHeapMemoryMaximum() {
                return mem.getHeapMemoryUsage().getMax();
            }

            @Override public long getNonHeapMemoryInitialized() {
                return nonHeapMemoryUsage().getInit();
            }

            @Override public long getNonHeapMemoryUsed() {
                return nonHeapMemoryUsage().getUsed();
            }

            @Override public long getNonHeapMemoryCommitted() {
                return nonHeapMemoryUsage().getCommitted();
            }

            @Override public long getNonHeapMemoryMaximum() {
                return nonHeapMemoryUsage().getMax();
            }

            @Override public long getUptime() {
                return
rt.getUptime(); } @Override public long getStartTime() { return rt.getStartTime(); } @Override public int getThreadCount() { return threads.getThreadCount(); } @Override public int getPeakThreadCount() { return threads.getPeakThreadCount(); } @Override public long getTotalStartedThreadCount() { return threads.getTotalStartedThreadCount(); } @Override public int getDaemonThreadCount() { return threads.getDaemonThreadCount(); } }; } /** * @return Metrics provider. */ private DiscoveryMetricsProvider createMetricsProvider() { return new DiscoveryMetricsProvider() { /** */ private final long startTime = U.currentTimeMillis(); /** {@inheritDoc} */ @Override public ClusterMetrics metrics() { GridJobMetrics jm = ctx.jobMetric().getJobMetrics(); ClusterMetricsSnapshot nm = new ClusterMetricsSnapshot(); nm.setLastUpdateTime(U.currentTimeMillis()); // Job metrics. nm.setMaximumActiveJobs(jm.getMaximumActiveJobs()); nm.setCurrentActiveJobs(jm.getCurrentActiveJobs()); nm.setAverageActiveJobs(jm.getAverageActiveJobs()); nm.setMaximumWaitingJobs(jm.getMaximumWaitingJobs()); nm.setCurrentWaitingJobs(jm.getCurrentWaitingJobs()); nm.setAverageWaitingJobs(jm.getAverageWaitingJobs()); nm.setMaximumRejectedJobs(jm.getMaximumRejectedJobs()); nm.setCurrentRejectedJobs(jm.getCurrentRejectedJobs()); nm.setAverageRejectedJobs(jm.getAverageRejectedJobs()); nm.setMaximumCancelledJobs(jm.getMaximumCancelledJobs()); nm.setCurrentCancelledJobs(jm.getCurrentCancelledJobs()); nm.setAverageCancelledJobs(jm.getAverageCancelledJobs()); nm.setTotalRejectedJobs(jm.getTotalRejectedJobs()); nm.setTotalCancelledJobs(jm.getTotalCancelledJobs()); nm.setTotalExecutedJobs(jm.getTotalExecutedJobs()); nm.setMaximumJobWaitTime(jm.getMaximumJobWaitTime()); nm.setCurrentJobWaitTime(jm.getCurrentJobWaitTime()); nm.setAverageJobWaitTime(jm.getAverageJobWaitTime()); nm.setMaximumJobExecuteTime(jm.getMaximumJobExecuteTime()); nm.setCurrentJobExecuteTime(jm.getCurrentJobExecuteTime()); 
nm.setAverageJobExecuteTime(jm.getAverageJobExecuteTime()); nm.setCurrentIdleTime(jm.getCurrentIdleTime()); nm.setTotalIdleTime(jm.getTotalIdleTime()); nm.setAverageCpuLoad(jm.getAverageCpuLoad()); // Job metrics. nm.setTotalExecutedTasks(ctx.task().getTotalExecutedTasks()); // VM metrics. nm.setAvailableProcessors(metrics.getAvailableProcessors()); nm.setCurrentCpuLoad(metrics.getCurrentCpuLoad()); nm.setCurrentGcCpuLoad(metrics.getCurrentGcCpuLoad()); nm.setHeapMemoryInitialized(metrics.getHeapMemoryInitialized()); nm.setHeapMemoryUsed(metrics.getHeapMemoryUsed()); nm.setHeapMemoryCommitted(metrics.getHeapMemoryCommitted()); nm.setHeapMemoryMaximum(metrics.getHeapMemoryMaximum()); nm.setHeapMemoryTotal(metrics.getHeapMemoryMaximum()); nm.setNonHeapMemoryInitialized(metrics.getNonHeapMemoryInitialized()); nm.setNonHeapMemoryUsed(metrics.getNonHeapMemoryUsed()); nm.setNonHeapMemoryCommitted(metrics.getNonHeapMemoryCommitted()); nm.setNonHeapMemoryMaximum(metrics.getNonHeapMemoryMaximum()); nm.setNonHeapMemoryTotal(metrics.getNonHeapMemoryMaximum()); nm.setUpTime(metrics.getUptime()); nm.setStartTime(metrics.getStartTime()); nm.setNodeStartTime(startTime); nm.setCurrentThreadCount(metrics.getThreadCount()); nm.setMaximumThreadCount(metrics.getPeakThreadCount()); nm.setTotalStartedThreadCount(metrics.getTotalStartedThreadCount()); nm.setCurrentDaemonThreadCount(metrics.getDaemonThreadCount()); nm.setTotalNodes(1); // Data metrics. nm.setLastDataVersion(ctx.cache().lastDataVersion()); GridIoManager io = ctx.io(); // IO metrics. 
nm.setSentMessagesCount(io.getSentMessagesCount()); nm.setSentBytesCount(io.getSentBytesCount()); nm.setReceivedMessagesCount(io.getReceivedMessagesCount()); nm.setReceivedBytesCount(io.getReceivedBytesCount()); nm.setOutboundMessagesQueueSize(io.getOutboundMessagesQueueSize()); return nm; } /** {@inheritDoc} */ @Override public Map<Integer, CacheMetrics> cacheMetrics() { Collection<GridCacheAdapter<?, ?>> caches = ctx.cache().internalCaches(); if (F.isEmpty(caches)) return Collections.emptyMap(); Map<Integer, CacheMetrics> metrics = null; for (GridCacheAdapter<?, ?> cache : caches) { if (cache.configuration().isStatisticsEnabled()) { if (metrics == null) metrics = U.newHashMap(caches.size()); metrics.put(cache.context().cacheId(), cache.metrics()); } } return metrics == null ? Collections.<Integer, CacheMetrics>emptyMap() : metrics; } }; } /** * @return Local metrics. */ public GridLocalMetrics metrics() { return metrics; } /** @return {@code True} if ordering is supported. */ private boolean discoOrdered() { DiscoverySpiOrderSupport ann = U.getAnnotation(ctx.config().getDiscoverySpi().getClass(), DiscoverySpiOrderSupport.class); return ann != null && ann.value(); } /** @return {@code True} if topology snapshots history is supported. */ private boolean historySupported() { DiscoverySpiHistorySupport ann = U.getAnnotation(ctx.config().getDiscoverySpi().getClass(), DiscoverySpiHistorySupport.class); return ann != null && ann.value(); } /** * Checks segment on start waiting for correct segment if necessary. * * @throws IgniteCheckedException If check failed. */ private void checkSegmentOnStart() throws IgniteCheckedException { assert hasRslvrs; if (log.isDebugEnabled()) log.debug("Starting network segment check."); while (true) { if (ctx.segmentation().isValidSegment()) break; if (ctx.config().isWaitForSegmentOnStart()) { LT.warn(log, null, "Failed to check network segment (retrying every 2000 ms)."); // Wait and check again. 
U.sleep(2000); } else throw new IgniteCheckedException("Failed to check network segment."); } if (log.isDebugEnabled()) log.debug("Finished network segment check successfully."); } /** * Checks whether attributes of the local node are consistent with remote nodes. * * @param nodes List of remote nodes to check attributes on. * @throws IgniteCheckedException In case of error. */ private void checkAttributes(Iterable<ClusterNode> nodes) throws IgniteCheckedException { ClusterNode locNode = getSpi().getLocalNode(); assert locNode != null; // Fetch local node attributes once. String locPreferIpV4 = locNode.attribute("java.net.preferIPv4Stack"); Object locMode = locNode.attribute(ATTR_DEPLOYMENT_MODE); boolean locP2pEnabled = locNode.attribute(ATTR_PEER_CLASSLOADING); boolean warned = false; for (ClusterNode n : nodes) { String rmtPreferIpV4 = n.attribute("java.net.preferIPv4Stack"); if (!F.eq(rmtPreferIpV4, locPreferIpV4)) { if (!warned) U.warn(log, "Local node's value of 'java.net.preferIPv4Stack' " + "system property differs from remote node's " + "(all nodes in topology should have identical value) " + "[locPreferIpV4=" + locPreferIpV4 + ", rmtPreferIpV4=" + rmtPreferIpV4 + ", locId8=" + U.id8(locNode.id()) + ", rmtId8=" + U.id8(n.id()) + ", rmtAddrs=" + U.addressesAsString(n) + ']', "Local and remote 'java.net.preferIPv4Stack' system properties do not match."); warned = true; } // Daemon nodes are allowed to have any deployment they need. // Skip data center ID check for daemon nodes. 
if (!isLocDaemon && !n.isDaemon()) { Object rmtMode = n.attribute(ATTR_DEPLOYMENT_MODE); if (!locMode.equals(rmtMode)) throw new IgniteCheckedException("Remote node has deployment mode different from local " + "[locId8=" + U.id8(locNode.id()) + ", locMode=" + locMode + ", rmtId8=" + U.id8(n.id()) + ", rmtMode=" + rmtMode + ", rmtAddrs=" + U.addressesAsString(n) + ']'); boolean rmtP2pEnabled = n.attribute(ATTR_PEER_CLASSLOADING); if (locP2pEnabled != rmtP2pEnabled) throw new IgniteCheckedException("Remote node has peer class loading enabled flag different from local " + "[locId8=" + U.id8(locNode.id()) + ", locPeerClassLoading=" + locP2pEnabled + ", rmtId8=" + U.id8(n.id()) + ", rmtPeerClassLoading=" + rmtP2pEnabled + ", rmtAddrs=" + U.addressesAsString(n) + ']'); } } if (log.isDebugEnabled()) log.debug("Finished node attributes consistency check."); } /** * @param nodes Nodes. * @return Total CPUs. */ private static int cpus(Collection<ClusterNode> nodes) { Collection<String> macSet = new HashSet<>(nodes.size(), 1.0f); int cpus = 0; for (ClusterNode n : nodes) { String macs = n.attribute(ATTR_MACS); if (macSet.add(macs)) cpus += n.metrics().getTotalCpus(); } return cpus; } /** * Prints the latest topology info into log taking into account logging/verbosity settings. */ public void ackTopology() { ackTopology(topSnap.get().topVer.topologyVersion(), false); } /** * Logs grid size for license compliance. * * @param topVer Topology version. * @param throttle Suppress printing if this topology was already printed. */ private void ackTopology(long topVer, boolean throttle) { assert !isLocDaemon; DiscoCache discoCache = discoCache(); Collection<ClusterNode> rmtNodes = discoCache.remoteNodes(); ClusterNode locNode = discoCache.localNode(); Collection<ClusterNode> allNodes = discoCache.allNodes(); long hash = topologyHash(allNodes); // Prevent ack-ing topology for the same topology. // Can happen only during node startup. 
if (throttle && lastLoggedTop.getAndSet(hash) == hash) return; int totalCpus = cpus(allNodes); double heap = U.heapSize(allNodes, 2); if (log.isQuiet()) U.quiet(false, topologySnapshotMessage(rmtNodes.size(), totalCpus, heap)); if (log.isDebugEnabled()) { String dbg = ""; dbg += U.nl() + U.nl() + ">>> +----------------+" + U.nl() + ">>> " + PREFIX + "." + U.nl() + ">>> +----------------+" + U.nl() + ">>> Grid name: " + (ctx.gridName() == null ? "default" : ctx.gridName()) + U.nl() + ">>> Number of nodes: " + (rmtNodes.size() + 1) + U.nl() + (discoOrdered ? ">>> Topology version: " + topVer + U.nl() : "") + ">>> Topology hash: 0x" + Long.toHexString(hash).toUpperCase() + U.nl(); dbg += ">>> Local: " + locNode.id().toString().toUpperCase() + ", " + U.addressesAsString(locNode) + ", " + locNode.order() + ", " + locNode.attribute("os.name") + ' ' + locNode.attribute("os.arch") + ' ' + locNode.attribute("os.version") + ", " + System.getProperty("user.name") + ", " + locNode.attribute("java.runtime.name") + ' ' + locNode.attribute("java.runtime.version") + U.nl(); for (ClusterNode node : rmtNodes) dbg += ">>> Remote: " + node.id().toString().toUpperCase() + ", " + U.addressesAsString(node) + ", " + node.order() + ", " + node.attribute("os.name") + ' ' + node.attribute("os.arch") + ' ' + node.attribute("os.version") + ", " + node.attribute(ATTR_USER_NAME) + ", " + node.attribute("java.runtime.name") + ' ' + node.attribute("java.runtime.version") + U.nl(); dbg += ">>> Total number of CPUs: " + totalCpus + U.nl(); dbg += ">>> Total heap size: " + heap + "GB" + U.nl(); log.debug(dbg); } else if (log.isInfoEnabled()) log.info(topologySnapshotMessage(rmtNodes.size(), totalCpus, heap)); } /** * @param rmtNodesNum Remote nodes number. * @param totalCpus Total cpu number. * @param heap Heap size. * @return Topology snapshot message. */ private String topologySnapshotMessage(int rmtNodesNum, int totalCpus, double heap) { return PREFIX + " [" + (discoOrdered ? 
"ver=" + topSnap.get().topVer.topologyVersion() + ", " : "") +
            "nodes=" + (rmtNodesNum + 1) +
            ", CPUs=" + totalCpus +
            ", heap=" + heap + "GB" + ']';
    }

    /** {@inheritDoc} */
    @Override public void onKernalStop0(boolean cancel) {
        // Stop segment check worker.
        if (segChkWrk != null) {
            segChkWrk.cancel();

            U.join(segChkThread, log);
        }

        // Unblock any caller still waiting for the local join event.
        if (!locJoinEvt.isDone())
            locJoinEvt.onDone(
                new IgniteCheckedException("Failed to wait for local node joined event (grid is stopping)."));
    }

    /** {@inheritDoc} */
    @Override public void stop(boolean cancel) throws IgniteCheckedException {
        // Stop receiving notifications.
        getSpi().setListener(null);

        // Stop discovery worker and metrics updater.
        U.cancel(discoWrk);
        U.cancel(metricsUpdater);

        U.join(discoWrk, log);
        U.join(metricsUpdater, log);

        // Stop SPI itself.
        stopSpi();

        if (log.isDebugEnabled())
            log.debug(stopInfo());
    }

    /**
     * Checks that every given node ID belongs to an alive node.
     *
     * @param nodeIds Node IDs to check.
     * @return {@code True} only if <b>all</b> IDs belong to alive nodes; {@code false} if the
     *      collection is {@code null}, empty, or contains at least one ID of a dead node.
     */
    public boolean aliveAll(@Nullable Collection<UUID> nodeIds) {
        if (nodeIds == null || nodeIds.isEmpty())
            return false;

        for (UUID id : nodeIds)
            if (!alive(id))
                return false;

        return true;
    }

    /**
     * @param nodeId Node ID.
     * @return {@code True} if node for given ID is alive.
     */
    public boolean alive(UUID nodeId) {
        assert nodeId != null;

        return getSpi().getNode(nodeId) != null; // Go directly to SPI without checking disco cache.
    }

    /**
     * @param node Node.
     * @return {@code True} if node is alive.
     */
    public boolean alive(ClusterNode node) {
        assert node != null;

        return alive(node.id());
    }

    /**
     * Pings the node via the discovery SPI.
     *
     * @param nodeId ID of the node.
     * @return {@code True} if ping succeeded.
     */
    public boolean pingNode(UUID nodeId) {
        assert nodeId != null;

        return getSpi().pingNode(nodeId);
    }

    /**
     * Looks the node up in the current discovery cache snapshot.
     *
     * @param nodeId ID of the node.
     * @return Node for ID.
     */
    @Nullable public ClusterNode node(UUID nodeId) {
        assert nodeId != null;

        return discoCache().node(nodeId);
    }

    /**
     * Gets collection of node for given node IDs and predicates.
     *
     * @param ids Ids to include.
     * @param p Filter for IDs.
     * @return Collection with all alive nodes for given IDs.
     */
    public Collection<ClusterNode> nodes(@Nullable Collection<UUID> ids, IgnitePredicate<UUID>... p) {
        return F.isEmpty(ids) ? Collections.<ClusterNode>emptyList() :
            F.view(F.viewReadOnly(ids, U.id2Node(ctx), p), F.notNull());
    }

    /**
     * Gets topology hash for given set of nodes.
     *
     * The hash is order-independent: node IDs are sorted before being folded into a CRC32.
     *
     * @param nodes Subset of grid nodes for hashing.
     * @return Hash for given topology.
     */
    public long topologyHash(Iterable<? extends ClusterNode> nodes) {
        assert nodes != null;

        Iterator<? extends ClusterNode> iter = nodes.iterator();

        if (!iter.hasNext())
            return 0; // Special case.

        List<String> uids = new ArrayList<>();

        for (ClusterNode node : nodes)
            uids.add(node.id().toString());

        Collections.sort(uids);

        CRC32 hash = new CRC32();

        for (String uuid : uids)
            // NOTE(review): getBytes() uses the platform-default charset; UUID strings are
            // ASCII so the hash is stable in practice, but an explicit charset would be safer.
            hash.update(uuid.getBytes());

        return hash.getValue();
    }

    /**
     * Gets future that will be completed when current topology version becomes greater or equal to argument passed.
     *
     * @param awaitVer Topology version to await.
     * @return Future.
     */
    public IgniteInternalFuture<Long> topologyFuture(final long awaitVer) {
        long topVer = topologyVersion();

        // Already at or past the awaited version - complete immediately.
        if (topVer >= awaitVer)
            return new GridFinishedFuture<>(topVer);

        DiscoTopologyFuture fut = new DiscoTopologyFuture(ctx, awaitVer);

        fut.init();

        return fut;
    }

    /**
     * Gets discovery collection cache from SPI safely guarding against "floating" collections.
     *
     * @return Discovery collection cache.
     */
    public DiscoCache discoCache() {
        Snapshot cur;

        // Lazily install the initial snapshot via CAS; loop handles a racing installer.
        while ((cur = topSnap.get()) == null) {
            // Wrap the SPI collection to avoid possible floating collection.
            if (topSnap.compareAndSet(null, cur = new Snapshot(
                AffinityTopologyVersion.ZERO,
                new DiscoCache(localNode(), getSpi().getRemoteNodes())))) {
                return cur.discoCache;
            }
        }

        return cur.discoCache;
    }

    /**
     * Gets discovery collection cache from SPI safely guarding against "floating" collections.
     *
     * @return Discovery collection cache.
*/
    public DiscoCache discoCache(AffinityTopologyVersion topVer) {
        // May return null if the version has been evicted from history.
        return discoCacheHist.get(topVer);
    }

    /** @return All non-daemon remote nodes in topology. */
    public Collection<ClusterNode> remoteNodes() {
        return discoCache().remoteNodes();
    }

    /** @return All non-daemon nodes in topology. */
    public Collection<ClusterNode> allNodes() {
        return discoCache().allNodes();
    }

    /**
     * Gets topology grouped by node versions.
     *
     * @return Version to collection of nodes map.
     */
    public NavigableMap<IgniteProductVersion, Collection<ClusterNode>> topologyVersionMap() {
        return discoCache().versionsMap();
    }

    /** @return Full topology size. */
    public int size() {
        return discoCache().allNodes().size();
    }

    /**
     * Gets all nodes for given topology version.
     *
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> nodes(long topVer) {
        return resolveDiscoCache(null, new AffinityTopologyVersion(topVer)).allNodes();
    }

    /**
     * Gets cache nodes for cache with given name.
     *
     * @param cacheName Cache name.
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> cacheNodes(@Nullable String cacheName, AffinityTopologyVersion topVer) {
        return resolveDiscoCache(cacheName, topVer).cacheNodes(cacheName, topVer.topologyVersion());
    }

    /**
     * Gets all nodes with at least one cache configured.
     *
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> cacheNodes(AffinityTopologyVersion topVer) {
        return resolveDiscoCache(null, topVer).allNodesWithCaches(topVer.topologyVersion());
    }

    /**
     * Gets cache remote nodes for cache with given name.
     *
     * @param cacheName Cache name.
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> remoteCacheNodes(@Nullable String cacheName, AffinityTopologyVersion topVer) {
        return resolveDiscoCache(cacheName, topVer).remoteCacheNodes(cacheName, topVer.topologyVersion());
    }

    /**
     * Gets remote nodes with at least one cache configured (no cache name filter).
     *
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> remoteCacheNodes(AffinityTopologyVersion topVer) {
        return resolveDiscoCache(null, topVer).remoteCacheNodes(topVer.topologyVersion());
    }

    /**
     * Gets alive cache nodes for cache with given name.
     *
     * @param cacheName Cache name.
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> aliveCacheNodes(@Nullable String cacheName, AffinityTopologyVersion topVer) {
        return resolveDiscoCache(cacheName, topVer).aliveCacheNodes(cacheName, topVer.topologyVersion());
    }

    /**
     * Gets alive remote cache nodes for cache with given name.
     *
     * @param cacheName Cache name.
     * @param topVer Topology version.
     * @return Collection of cache nodes.
     */
    public Collection<ClusterNode> aliveRemoteCacheNodes(@Nullable String cacheName, AffinityTopologyVersion topVer) {
        return resolveDiscoCache(cacheName, topVer).aliveRemoteCacheNodes(cacheName, topVer.topologyVersion());
    }

    /**
     * Gets alive remote nodes with at least one cache configured.
     *
     * @param topVer Topology version (maximum allowed node order).
     * @return Collection of alive cache nodes.
     */
    public Collection<ClusterNode> aliveRemoteNodesWithCaches(AffinityTopologyVersion topVer) {
        return resolveDiscoCache(null, topVer).aliveRemoteNodesWithCaches(topVer.topologyVersion());
    }

    /**
     * Gets alive nodes with at least one cache configured.
     *
     * @param topVer Topology version (maximum allowed node order).
     * @return Collection of alive cache nodes.
     */
    public Collection<ClusterNode> aliveNodesWithCaches(AffinityTopologyVersion topVer) {
        return resolveDiscoCache(null, topVer).aliveNodesWithCaches(topVer.topologyVersion());
    }

    /**
     * Gets cache nodes for cache with given name that participate in affinity calculation.
     *
     * @param cacheName Cache name.
     * @param topVer Topology version.
     * @return Collection of cache affinity nodes.
     */
    public Collection<ClusterNode> cacheAffinityNodes(@Nullable String cacheName, AffinityTopologyVersion topVer) {
        return resolveDiscoCache(cacheName, topVer).cacheAffinityNodes(cacheName, topVer.topologyVersion());
    }

    /**
     * Checks if node is a data node for the given cache.
     *
     * Returns {@code false} for caches not present in {@code registeredCaches}.
     *
     * @param node Node to check.
     * @param cacheName Cache name.
     * @return {@code True} if node is a cache data node.
     */
    public boolean cacheAffinityNode(ClusterNode node, String cacheName) {
        CachePredicate predicate = registeredCaches.get(cacheName);

        return predicate != null && predicate.dataNode(node);
    }

    /**
     * @param node Node to check.
     * @param cacheName Cache name.
     * @return {@code True} if node has near cache enabled.
     */
    public boolean cacheNearNode(ClusterNode node, String cacheName) {
        CachePredicate predicate = registeredCaches.get(cacheName);

        return predicate != null && predicate.nearNode(node);
    }

    /**
     * @param node Node to check.
     * @param cacheName Cache name.
     * @return {@code True} if node has client cache (without near cache).
     */
    public boolean cacheClientNode(ClusterNode node, String cacheName) {
        CachePredicate predicate = registeredCaches.get(cacheName);

        return predicate != null && predicate.clientNode(node);
    }

    /**
     * @param node Node to check.
     * @param cacheName Cache name.
     * @return If cache with the given name is accessible on the given node.
     */
    public boolean cacheNode(ClusterNode node, String cacheName) {
        CachePredicate predicate = registeredCaches.get(cacheName);

        return predicate != null && predicate.cacheNode(node);
    }

    /**
     * Checks if cache with given name has at least one node with near cache enabled.
*
     * @param cacheName Cache name.
     * @param topVer Topology version.
     * @return {@code True} if cache with given name has at least one node with near cache enabled.
     */
    public boolean hasNearCache(@Nullable String cacheName, AffinityTopologyVersion topVer) {
        return resolveDiscoCache(cacheName, topVer).hasNearCache(cacheName);
    }

    /**
     * Gets discovery cache for given topology version.
     *
     * @param cacheName Cache name (participates in exception message).
     * @param topVer Topology version.
     * @return Discovery cache.
     */
    private DiscoCache resolveDiscoCache(@Nullable String cacheName, AffinityTopologyVersion topVer) {
        Snapshot snap = topSnap.get();

        // Current snapshot serves the "latest" (NONE) request or an exact version match;
        // anything else goes through the history map.
        DiscoCache cache = AffinityTopologyVersion.NONE.equals(topVer) || topVer.equals(snap.topVer) ?
            snap.discoCache : discoCacheHist.get(topVer);

        if (cache == null) {
            // Find the eldest acceptable discovery cache.
            // If the requested version predates everything we still retain, the eldest
            // retained entry is used as the closest available approximation.
            Map.Entry<AffinityTopologyVersion, DiscoCache> eldest = Collections.min(discoCacheHist.entrySet(), histCmp);

            if (topVer.compareTo(eldest.getKey()) < 0)
                cache = eldest.getValue();
        }

        if (cache == null) {
            throw new IgniteException("Failed to resolve nodes topology [cacheName=" + cacheName +
                ", topVer=" + topVer +
                ", history=" + discoCacheHist.keySet() +
                ", locNode=" + ctx.discovery().localNode() + ']');
        }

        return cache;
    }

    /**
     * Gets topology by specified version from history storage.
     *
     * @param topVer Topology version.
     * @return Topology nodes or {@code null} if there are no nodes for passed in version.
     */
    @Nullable public Collection<ClusterNode> topology(long topVer) {
        if (!histSupported)
            throw new UnsupportedOperationException("Current discovery SPI does not support " +
                "topology snapshots history (consider using TCP discovery SPI).");

        // Read the volatile-published map once for a consistent view.
        Map<Long, Collection<ClusterNode>> snapshots = topHist;

        return snapshots.get(topVer);
    }

    /** @return All daemon nodes in topology. */
    public Collection<ClusterNode> daemonNodes() {
        return discoCache().daemonNodes();
    }

    /** @return Local node. */
    public ClusterNode localNode() {
        // Falls back to the SPI until the local node reference is cached during start.
        return locNode == null ? getSpi().getLocalNode() : locNode;
    }

    /** @return Topology version. */
    public long topologyVersion() {
        return topSnap.get().topVer.topologyVersion();
    }

    /**
     * @return Topology version.
     */
    public AffinityTopologyVersion topologyVersionEx() {
        return topSnap.get().topVer;
    }

    /** @return Event that represents a local node joined to topology. */
    public DiscoveryEvent localJoinEvent() {
        try {
            // Blocks until the local join event has been fired.
            return locJoinEvt.get();
        }
        catch (IgniteCheckedException e) {
            throw new IgniteException(e);
        }
    }

    /**
     * Sends a custom event through the discovery SPI.
     *
     * @param evt Event.
     */
    public void sendCustomEvent(Serializable evt) {
        getSpi().sendCustomEvent(evt);
    }

    /**
     * Gets first grid node start time, see {@link DiscoverySpi#getGridStartTime()}.
     *
     * @return Start time of the first grid node.
     */
    public long gridStartTime() {
        return getSpi().getGridStartTime();
    }

    /**
     * Pings the node and, if it does not respond, reports it as failed to the SPI.
     *
     * @param nodeId Node ID.
     * @return {@code True} if the node did not respond to ping and was reported as failed;
     *      {@code false} if the node responded and was left alone.
     */
    public boolean tryFailNode(UUID nodeId) {
        if (!getSpi().pingNode(nodeId)) {
            getSpi().failNode(nodeId);

            return true;
        }

        return false;
    }

    /**
     * Updates topology snapshot if the given version is greater than or equal to the current one.
     *
     * Note: the comparison is {@code >= 0}, so an equal version also replaces the stored
     * {@code DiscoCache} (CAS loop retries on contention).
     *
     * @param updated Updated topology version.
     * @param discoCache Discovery cache to publish together with the version.
     * @return {@code True} if topology was updated.
     */
    private boolean updateTopologyVersionIfGreater(AffinityTopologyVersion updated, DiscoCache discoCache) {
        while (true) {
            Snapshot cur = topSnap.get();

            if (updated.compareTo(cur.topVer) >= 0) {
                if (topSnap.compareAndSet(cur, new Snapshot(updated, discoCache)))
                    return true;
            }
            else
                return false;
        }
    }

    /** Stops local node. */
    private void stopNode() {
        // Stop asynchronously so the discovery thread is not blocked by shutdown.
        new Thread(
            new Runnable() {
                @Override public void run() {
                    ctx.markSegmented();

                    G.stop(ctx.gridName(), true);
                }
            }
        ).start();
    }

    /** Restarts JVM. */
    private void restartJvm() {
        new Thread(
            new Runnable() {
                @Override public void run() {
                    ctx.markSegmented();

                    G.restart(true);
                }
            }
        ).start();
    }

    /**
     * Worker for network segment checks.
*/
    private class SegmentCheckWorker extends GridWorker {
        /** Requests queue; each element is a token asking for an immediate segment check. */
        private final BlockingQueue<Object> queue = new LinkedBlockingQueue<>();

        /**
         * Creates the worker. Only constructed when segmentation resolvers are configured
         * and periodic checking is enabled (see asserts below).
         */
        private SegmentCheckWorker() {
            super(ctx.gridName(), "disco-net-seg-chk-worker", GridDiscoveryManager.this.log);

            assert hasRslvrs;
            assert segChkFreq > 0;
        }

        /**
         * Requests an out-of-band segment check (e.g. on node failure or leave).
         */
        public void scheduleSegmentCheck() {
            queue.add(new Object());
        }

        /** {@inheritDoc} */
        @SuppressWarnings("StatementWithEmptyBody")
        @Override protected void body() throws InterruptedException {
            long lastChk = 0;

            while (!isCancelled()) {
                Object req = queue.poll(2000, MILLISECONDS);

                long now = U.currentTimeMillis();

                // Check frequency if segment check has not been requested.
                // NOTE(review): the 'segChkFreq == 0' disjunct is unreachable here - the
                // constructor asserts segChkFreq > 0.
                if (req == null && (segChkFreq == 0 || lastChk + segChkFreq >= now)) {
                    if (log.isDebugEnabled())
                        log.debug("Skipping segment check as it has not been requested and it is not time to check.");

                    continue;
                }

                // We should always check segment if it has been explicitly
                // requested (on any node failure or leave).
                assert req != null || lastChk + segChkFreq < now;

                // Drain queue.
                while (queue.poll() != null) {
                    // No-op.
                }

                // Only re-check while the last result was "valid"; once segmented,
                // stop checking until reconnected.
                if (lastSegChkRes.get()) {
                    boolean segValid = ctx.segmentation().isValidSegment();

                    lastChk = now;

                    if (!segValid) {
                        discoWrk.addEvent(EVT_NODE_SEGMENTED, AffinityTopologyVersion.NONE,
                            getSpi().getLocalNode(),
                            Collections.<ClusterNode>emptyList(),
                            null);

                        lastSegChkRes.set(false);
                    }

                    if (log.isDebugEnabled())
                        log.debug("Segment has been checked [requested=" + (req != null) + ", valid=" + segValid + ']');
                }
            }
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(SegmentCheckWorker.class, this);
        }
    }

    /** Worker for discovery events. */
    private class DiscoveryWorker extends GridWorker {
        /** Event queue. */
        private final BlockingQueue<GridTuple5<Integer, AffinityTopologyVersion, ClusterNode,
            Collection<ClusterNode>, Serializable>> evts = new LinkedBlockingQueue<>();

        /** Node segmented event fired flag.
*/ private boolean nodeSegFired; /** * */ private DiscoveryWorker() { super(ctx.gridName(), "disco-event-worker", GridDiscoveryManager.this.log); } /** * Method is called when any discovery event occurs. * * @param type Discovery event type. See {@link DiscoveryEvent} for more details. * @param topVer Topology version. * @param node Remote node this event is connected with. * @param topSnapshot Topology snapshot. */ @SuppressWarnings("RedundantTypeArguments") private void recordEvent(int type, long topVer, ClusterNode node, Collection<ClusterNode> topSnapshot) { assert node != null; if (ctx.event().isRecordable(type)) { DiscoveryEvent evt = new DiscoveryEvent(); evt.node(ctx.discovery().localNode()); evt.eventNode(node); evt.type(type); evt.topologySnapshot(topVer, U.<ClusterNode, ClusterNode>arrayList(topSnapshot, daemonFilter)); if (type == EVT_NODE_METRICS_UPDATED) evt.message("Metrics were updated: " + node); else if (type == EVT_NODE_JOINED) evt.message("Node joined: " + node); else if (type == EVT_NODE_LEFT) evt.message("Node left: " + node); else if (type == EVT_NODE_FAILED) evt.message("Node failed: " + node); else if (type == EVT_NODE_SEGMENTED) evt.message("Node segmented: " + node); else assert false; ctx.event().record(evt); } } /** * @param type Event type. * @param topVer Topology version. * @param node Node. * @param topSnapshot Topology snapshot. */ void addEvent( int type, AffinityTopologyVersion topVer, ClusterNode node, Collection<ClusterNode> topSnapshot, @Nullable Serializable data ) { assert node != null; evts.add(F.t(type, topVer, node, topSnapshot, data)); } /** * @param node Node to get a short description for. * @return Short description for the node to be used in 'quiet' mode. 
*/ private String quietNode(ClusterNode node) { assert node != null; return "nodeId8=" + node.id().toString().substring(0, 8) + ", " + "addrs=" + U.addressesAsString(node) + ", " + "order=" + node.order() + ", " + "CPUs=" + node.metrics().getTotalCpus(); } /** {@inheritDoc} */ @Override protected void body() throws InterruptedException { while (!isCancelled()) { try { body0(); } catch (InterruptedException e) { throw e; } catch (Throwable t) { U.error(log, "Unexpected exception in discovery worker thread (ignored).", t); } } } /** @throws InterruptedException If interrupted. */ @SuppressWarnings("DuplicateCondition") private void body0() throws InterruptedException { GridTuple5<Integer, AffinityTopologyVersion, ClusterNode, Collection<ClusterNode>, Serializable> evt = evts.take(); int type = evt.get1(); AffinityTopologyVersion topVer = evt.get2(); ClusterNode node = evt.get3(); boolean isDaemon = node.isDaemon(); boolean segmented = false; switch (type) { case EVT_NODE_JOINED: { assert !discoOrdered || topVer.topologyVersion() == node.order() : "Invalid topology version [topVer=" + topVer + ", node=" + node + ']'; try { checkAttributes(F.asList(node)); } catch (IgniteCheckedException e) { U.warn(log, e.getMessage()); // We a have well-formed attribute warning here. } if (!isDaemon) { if (!isLocDaemon) { if (log.isInfoEnabled()) log.info("Added new node to topology: " + node); ackTopology(topVer.topologyVersion(), true); } else if (log.isDebugEnabled()) log.debug("Added new node to topology: " + node); } else if (log.isDebugEnabled()) log.debug("Added new daemon node to topology: " + node); break; } case EVT_NODE_LEFT: { // Check only if resolvers were configured. 
if (hasRslvrs) segChkWrk.scheduleSegmentCheck(); if (!isDaemon) { if (!isLocDaemon) { if (log.isInfoEnabled()) log.info("Node left topology: " + node); ackTopology(topVer.topologyVersion(), true); } else if (log.isDebugEnabled()) log.debug("Node left topology: " + node); } else if (log.isDebugEnabled()) log.debug("Daemon node left topology: " + node); break; } case EVT_NODE_FAILED: { // Check only if resolvers were configured. if (hasRslvrs) segChkWrk.scheduleSegmentCheck(); if (!isDaemon) { if (!isLocDaemon) { U.warn(log, "Node FAILED: " + node); ackTopology(topVer.topologyVersion(), true); } else if (log.isDebugEnabled()) log.debug("Node FAILED: " + node); } else if (log.isDebugEnabled()) log.debug("Daemon node FAILED: " + node); break; } case EVT_NODE_SEGMENTED: { assert F.eqNodes(localNode(), node); if (nodeSegFired) { if (log.isDebugEnabled()) { log.debug("Ignored node segmented event [type=EVT_NODE_SEGMENTED, " + "node=" + node + ']'); } return; } // Ignore all further EVT_NODE_SEGMENTED events // until EVT_NODE_RECONNECTED is fired. nodeSegFired = true; lastLoggedTop.set(0); segmented = true; if (!isLocDaemon) U.warn(log, "Local node SEGMENTED: " + node); else if (log.isDebugEnabled()) log.debug("Local node SEGMENTED: " + node); break; } case DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT: { if (ctx.event().isRecordable(DiscoveryCustomEvent.EVT_DISCOVERY_CUSTOM_EVT)) { DiscoveryCustomEvent customEvt = new DiscoveryCustomEvent(); customEvt.node(ctx.discovery().localNode()); customEvt.eventNode(node); customEvt.type(type); customEvt.topologySnapshot(topVer.topologyVersion(), null); customEvt.affinityTopologyVersion(topVer); customEvt.data(evt.get5()); ctx.event().record(customEvt); } return; } // Don't log metric update to avoid flooding the log. 
case EVT_NODE_METRICS_UPDATED: break; default: assert false : "Invalid discovery event: " + type; } recordEvent(type, topVer.topologyVersion(), node, evt.get4()); if (segmented) onSegmentation(); } /** * */ private void onSegmentation() { SegmentationPolicy segPlc = ctx.config().getSegmentationPolicy(); // Always disconnect first. try { getSpi().disconnect(); } catch (IgniteSpiException e) { U.error(log, "Failed to disconnect discovery SPI.", e); } switch (segPlc) { case RESTART_JVM: U.warn(log, "Restarting JVM according to configured segmentation policy."); restartJvm(); break; case STOP: U.warn(log, "Stopping local node according to configured segmentation policy."); stopNode(); break; default: assert segPlc == NOOP : "Unsupported segmentation policy value: " + segPlc; } } /** {@inheritDoc} */ @Override public String toString() { return S.toString(DiscoveryWorker.class, this); } } /** * */ private class MetricsUpdater extends GridWorker { /** */ private long prevGcTime = -1; /** */ private long prevCpuTime = -1; /** * */ private MetricsUpdater() { super(ctx.gridName(), "metrics-updater", GridDiscoveryManager.this.log); } /** {@inheritDoc} */ @Override protected void body() throws IgniteInterruptedCheckedException { while (!isCancelled()) { U.sleep(METRICS_UPDATE_FREQ); gcCpuLoad = getGcCpuLoad(); cpuLoad = getCpuLoad(); } } /** * @return GC CPU load. */ private double getGcCpuLoad() { long gcTime = 0; for (GarbageCollectorMXBean bean : gc) { long colTime = bean.getCollectionTime(); if (colTime > 0) gcTime += colTime; } gcTime /= metrics.getAvailableProcessors(); double gc = 0; if (prevGcTime > 0) { long gcTimeDiff = gcTime - prevGcTime; gc = (double)gcTimeDiff / METRICS_UPDATE_FREQ; } prevGcTime = gcTime; return gc; } /** * @return CPU load. */ private double getCpuLoad() { long cpuTime; try { cpuTime = U.<Long>property(os, "processCpuTime"); } catch (IgniteException ignored) { return -1; } // Method reports time in nanoseconds across all processors. 
cpuTime /= 1000000 * metrics.getAvailableProcessors(); double cpu = 0; if (prevCpuTime > 0) { long cpuTimeDiff = cpuTime - prevCpuTime; // CPU load could go higher than 100% because calculating of cpuTimeDiff also takes some time. cpu = Math.min(1.0, (double)cpuTimeDiff / METRICS_UPDATE_FREQ); } prevCpuTime = cpuTime; return cpu; } /** {@inheritDoc} */ @Override public String toString() { return S.toString(MetricsUpdater.class, this, super.toString()); } } /** Discovery topology future. */ private static class DiscoTopologyFuture extends GridFutureAdapter<Long> implements GridLocalEventListener { /** */ private static final long serialVersionUID = 0L; /** */ private GridKernalContext ctx; /** Topology await version. */ private long awaitVer; /** Empty constructor required by {@link Externalizable}. */ private DiscoTopologyFuture() { // No-op. } /** * @param ctx Context. * @param awaitVer Await version. */ private DiscoTopologyFuture(GridKernalContext ctx, long awaitVer) { this.ctx = ctx; this.awaitVer = awaitVer; } /** Initializes future. */ private void init() { ctx.event().addLocalEventListener(this, EVT_NODE_JOINED, EVT_NODE_LEFT, EVT_NODE_FAILED); // Close potential window. 
long topVer = ctx.discovery().topologyVersion(); if (topVer >= awaitVer) onDone(topVer); } /** {@inheritDoc} */ @Override public boolean onDone(@Nullable Long res, @Nullable Throwable err) { if (super.onDone(res, err)) { ctx.event().removeLocalEventListener(this, EVT_NODE_JOINED, EVT_NODE_LEFT, EVT_NODE_FAILED); return true; } return false; } /** {@inheritDoc} */ @Override public void onEvent(Event evt) { assert evt.type() == EVT_NODE_JOINED || evt.type() == EVT_NODE_LEFT || evt.type() == EVT_NODE_FAILED; DiscoveryEvent discoEvt = (DiscoveryEvent)evt; if (discoEvt.topologyVersion() >= awaitVer) onDone(discoEvt.topologyVersion()); } } /** * */ private static class Snapshot { /** */ private final AffinityTopologyVersion topVer; /** */ private final DiscoCache discoCache; /** * @param topVer Topology version. * @param discoCache Disco cache. */ private Snapshot(AffinityTopologyVersion topVer, DiscoCache discoCache) { this.topVer = topVer; this.discoCache = discoCache; } } /** Cache for discovery collections. */ private class DiscoCache { /** Remote nodes. */ private final List<ClusterNode> rmtNodes; /** All nodes. */ private final List<ClusterNode> allNodes; /** All nodes with at least one cache configured. */ @GridToStringInclude private final Collection<ClusterNode> allNodesWithCaches; /** All nodes with at least one cache configured. */ @GridToStringInclude private final Collection<ClusterNode> rmtNodesWithCaches; /** Cache nodes by cache name. */ @GridToStringInclude private final Map<String, Collection<ClusterNode>> allCacheNodes; /** Remote cache nodes by cache name. */ @GridToStringInclude private final Map<String, Collection<ClusterNode>> rmtCacheNodes; /** Cache nodes by cache name. */ @GridToStringInclude private final Map<String, Collection<ClusterNode>> affCacheNodes; /** Caches where at least one node has near cache enabled. */ @GridToStringInclude private final Set<String> nearEnabledCaches; /** Nodes grouped by version. 
*/ private final NavigableMap<IgniteProductVersion, Collection<ClusterNode>> nodesByVer; /** Daemon nodes. */ private final List<ClusterNode> daemonNodes; /** Node map. */ private final Map<UUID, ClusterNode> nodeMap; /** Local node. */ private final ClusterNode loc; /** Highest node order. */ private final long maxOrder; /** * Cached alive nodes list. As long as this collection doesn't accept {@code null}s use {@link * #maskNull(String)} before passing raw cache names to it. */ private final ConcurrentMap<String, Collection<ClusterNode>> aliveCacheNodes; /** * Cached alive remote nodes list. As long as this collection doesn't accept {@code null}s use {@link * #maskNull(String)} before passing raw cache names to it. */ private final ConcurrentMap<String, Collection<ClusterNode>> aliveRmtCacheNodes; /** * Cached alive remote nodes with caches. */ private final Collection<ClusterNode> aliveNodesWithCaches; /** * Cached alive remote nodes with caches. */ private final Collection<ClusterNode> aliveRmtNodesWithCaches; /** * @param loc Local node. * @param rmts Remote nodes. 
*/ private DiscoCache(ClusterNode loc, Collection<ClusterNode> rmts) { this.loc = loc; rmtNodes = Collections.unmodifiableList(new ArrayList<>(F.view(rmts, daemonFilter))); assert !rmtNodes.contains(loc) : "Remote nodes collection shouldn't contain local node" + " [rmtNodes=" + rmtNodes + ", loc=" + loc + ']'; List<ClusterNode> all = new ArrayList<>(rmtNodes.size() + 1); if (!loc.isDaemon()) all.add(loc); all.addAll(rmtNodes); allNodes = Collections.unmodifiableList(all); Map<String, Collection<ClusterNode>> cacheMap = new HashMap<>(allNodes.size(), 1.0f); Map<String, Collection<ClusterNode>> rmtCacheMap = new HashMap<>(allNodes.size(), 1.0f); Map<String, Collection<ClusterNode>> dhtNodesMap = new HashMap<>(allNodes.size(), 1.0f); Collection<ClusterNode> nodesWithCaches = new HashSet<>(allNodes.size()); Collection<ClusterNode> rmtNodesWithCaches = new HashSet<>(allNodes.size()); aliveCacheNodes = new ConcurrentHashMap8<>(allNodes.size(), 1.0f); aliveRmtCacheNodes = new ConcurrentHashMap8<>(allNodes.size(), 1.0f); aliveNodesWithCaches = new ConcurrentSkipListSet<>(); aliveRmtNodesWithCaches = new ConcurrentSkipListSet<>(); nodesByVer = new TreeMap<>(); long maxOrder0 = 0; Set<String> nearEnabledSet = new HashSet<>(); for (ClusterNode node : allNodes) { assert node.order() != 0 : "Invalid node order [locNode=" + loc + ", node=" + node + ']'; if (node.order() > maxOrder0) maxOrder0 = node.order(); boolean hasCaches = false; for (Map.Entry<String, CachePredicate> entry : registeredCaches.entrySet()) { String cacheName = entry.getKey(); CachePredicate filter = entry.getValue(); if (filter.cacheNode(node)) { nodesWithCaches.add(node); if (!loc.id().equals(node.id())) rmtNodesWithCaches.add(node); addToMap(cacheMap, cacheName, node); if (alive(node.id())) addToMap(aliveCacheNodes, maskNull(cacheName), node); if (filter.dataNode(node)) addToMap(dhtNodesMap, cacheName, node); if (filter.nearNode(node)) nearEnabledSet.add(cacheName); if (!loc.id().equals(node.id())) { 
addToMap(rmtCacheMap, cacheName, node); if (alive(node.id())) addToMap(aliveRmtCacheNodes, maskNull(cacheName), node); } hasCaches = true; } } if (hasCaches) { if (alive(node.id())) { aliveNodesWithCaches.add(node); if (!loc.id().equals(node.id())) aliveRmtNodesWithCaches.add(node); } } IgniteProductVersion nodeVer = U.productVersion(node); // Create collection for this version if it does not exist. Collection<ClusterNode> nodes = nodesByVer.get(nodeVer); if (nodes == null) { nodes = new ArrayList<>(allNodes.size()); nodesByVer.put(nodeVer, nodes); } nodes.add(node); } // Need second iteration to add this node to all previous node versions. for (ClusterNode node : allNodes) { IgniteProductVersion nodeVer = U.productVersion(node); // Get all versions lower or equal node's version. NavigableMap<IgniteProductVersion, Collection<ClusterNode>> updateView = nodesByVer.headMap(nodeVer, false); for (Collection<ClusterNode> prevVersions : updateView.values()) prevVersions.add(node); } maxOrder = maxOrder0; allCacheNodes = Collections.unmodifiableMap(cacheMap); rmtCacheNodes = Collections.unmodifiableMap(rmtCacheMap); affCacheNodes = Collections.unmodifiableMap(dhtNodesMap); allNodesWithCaches = Collections.unmodifiableCollection(nodesWithCaches); this.rmtNodesWithCaches = Collections.unmodifiableCollection(rmtNodesWithCaches); nearEnabledCaches = Collections.unmodifiableSet(nearEnabledSet); daemonNodes = Collections.unmodifiableList(new ArrayList<>( F.view(F.concat(false, loc, rmts), F0.not(daemonFilter)))); Map<UUID, ClusterNode> nodeMap = new HashMap<>(allNodes().size() + daemonNodes.size(), 1.0f); for (ClusterNode n : F.concat(false, allNodes(), daemonNodes())) nodeMap.put(n.id(), n); this.nodeMap = nodeMap; } /** * Adds node to map. * * @param cacheMap Map to add to. * @param cacheName Cache name. 
* @param rich Node to add */ private void addToMap(Map<String, Collection<ClusterNode>> cacheMap, String cacheName, ClusterNode rich) { Collection<ClusterNode> cacheNodes = cacheMap.get(cacheName); if (cacheNodes == null) { cacheNodes = new ArrayList<>(allNodes.size()); cacheMap.put(cacheName, cacheNodes); } cacheNodes.add(rich); } /** @return Local node. */ ClusterNode localNode() { return loc; } /** @return Remote nodes. */ Collection<ClusterNode> remoteNodes() { return rmtNodes; } /** @return All nodes. */ Collection<ClusterNode> allNodes() { return allNodes; } /** * @return All nodes with at least one cache configured. */ Collection<ClusterNode> allNodesWithCaches() { return allNodesWithCaches; } /** * Gets collection of nodes which have version equal or greater than {@code ver}. * * @param ver Version to check. * @return Collection of nodes with version equal or greater than {@code ver}. */ Collection<ClusterNode> elderNodes(IgniteProductVersion ver) { Map.Entry<IgniteProductVersion, Collection<ClusterNode>> entry = nodesByVer.ceilingEntry(ver); if (entry == null) return Collections.emptyList(); return entry.getValue(); } /** * @return Versions map. */ NavigableMap<IgniteProductVersion, Collection<ClusterNode>> versionsMap() { return nodesByVer; } /** * Gets collection of nodes with at least one cache configured. * * @param topVer Topology version (maximum allowed node order). * @return Collection of nodes. */ Collection<ClusterNode> allNodesWithCaches(final long topVer) { return filter(topVer, allNodesWithCaches); } /** * Gets all nodes that have cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> cacheNodes(@Nullable String cacheName, final long topVer) { return filter(topVer, allCacheNodes.get(cacheName)); } /** * Gets all remote nodes that have cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. 
* @return Collection of nodes. */ Collection<ClusterNode> remoteCacheNodes(@Nullable String cacheName, final long topVer) { return filter(topVer, rmtCacheNodes.get(cacheName)); } /** * Gets all remote nodes that have at least one cache configured. * * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> remoteCacheNodes(final long topVer) { return filter(topVer, rmtNodesWithCaches); } /** * Gets all nodes that have cache with given name and should participate in affinity calculation. With * partitioned cache nodes with near-only cache do not participate in affinity node calculation. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> cacheAffinityNodes(@Nullable String cacheName, final long topVer) { return filter(topVer, affCacheNodes.get(cacheName)); } /** * Gets all alive nodes that have cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> aliveCacheNodes(@Nullable String cacheName, final long topVer) { return filter(topVer, aliveCacheNodes.get(maskNull(cacheName))); } /** * Gets all alive remote nodes that have cache with given name. * * @param cacheName Cache name. * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> aliveRemoteCacheNodes(@Nullable String cacheName, final long topVer) { return filter(topVer, aliveRmtCacheNodes.get(maskNull(cacheName))); } /** * Gets all alive remote nodes with at least one cache configured. * * @param topVer Topology version. * @return Collection of nodes. */ Collection<ClusterNode> aliveRemoteNodesWithCaches(final long topVer) { return filter(topVer, aliveRmtNodesWithCaches); } /** * Gets all alive remote nodes with at least one cache configured. * * @param topVer Topology version. * @return Collection of nodes. 
*/ Collection<ClusterNode> aliveNodesWithCaches(final long topVer) { return filter(topVer, aliveNodesWithCaches); } /** * Checks if cache with given name has at least one node with near cache enabled. * * @param cacheName Cache name. * @return {@code True} if cache with given name has at least one node with near cache enabled. */ boolean hasNearCache(@Nullable String cacheName) { return nearEnabledCaches.contains(cacheName); } /** * Removes left node from cached alives lists. * * @param leftNode Left node. */ void updateAlives(ClusterNode leftNode) { if (leftNode.order() > maxOrder) return; filterNodeMap(aliveCacheNodes, leftNode); filterNodeMap(aliveRmtCacheNodes, leftNode); aliveNodesWithCaches.remove(leftNode); aliveRmtNodesWithCaches.remove(leftNode); } /** * Creates a copy of nodes map without the given node. * * @param map Map to copy. * @param exclNode Node to exclude. */ private void filterNodeMap(ConcurrentMap<String, Collection<ClusterNode>> map, final ClusterNode exclNode) { for (String cacheName : registeredCaches.keySet()) { String maskedName = maskNull(cacheName); while (true) { Collection<ClusterNode> oldNodes = map.get(maskedName); if (oldNodes == null || oldNodes.isEmpty()) break; Collection<ClusterNode> newNodes = new ArrayList<>(oldNodes); if (!newNodes.remove(exclNode)) break; if (map.replace(maskedName, oldNodes, newNodes)) break; } } } /** * Replaces {@code null} with {@code NULL_CACHE_NAME}. * * @param cacheName Cache name. * @return Masked name. */ private String maskNull(@Nullable String cacheName) { return cacheName == null ? NULL_CACHE_NAME : cacheName; } /** * @param topVer Topology version. * @param nodes Nodes. * @return Filtered collection (potentially empty, but never {@code null}). */ private Collection<ClusterNode> filter(final long topVer, @Nullable Collection<ClusterNode> nodes) { if (nodes == null) return Collections.emptyList(); // If no filtering needed, return original collection. 
return nodes.isEmpty() || topVer < 0 || topVer >= maxOrder ? nodes : F.view(nodes, new P1<ClusterNode>() { @Override public boolean apply(ClusterNode node) { return node.order() <= topVer; } }); } /** @return Daemon nodes. */ Collection<ClusterNode> daemonNodes() { return daemonNodes; } /** * @param id Node ID. * @return Node. */ @Nullable ClusterNode node(UUID id) { return nodeMap.get(id); } /** {@inheritDoc} */ @Override public String toString() { return S.toString(DiscoCache.class, this, "allNodesWithDaemons", U.toShortString(allNodes)); } } /** * Cache predicate. */ private static class CachePredicate { /** Cache filter. */ private IgnitePredicate<ClusterNode> cacheFilter; /** If near cache is enabled on data nodes. */ private boolean nearEnabled; /** Flag indicating if cache is local. */ private boolean loc; /** Collection of client near nodes. */ private Map<UUID, Boolean> clientNodes; /** * @param cacheFilter Cache filter. * @param nearEnabled Near enabled flag. */ private CachePredicate(IgnitePredicate<ClusterNode> cacheFilter, boolean nearEnabled, boolean loc) { assert cacheFilter != null; this.cacheFilter = cacheFilter; this.nearEnabled = nearEnabled; this.loc = loc; clientNodes = new ConcurrentHashMap<>(); } /** * @param nodeId Near node ID to add. */ public void addClientNode(UUID nodeId, boolean nearEnabled) { clientNodes.put(nodeId, nearEnabled); } /** * @param leftNodeId Left node ID. */ public void onNodeLeft(UUID leftNodeId) { clientNodes.remove(leftNodeId); } /** * @param node Node to check. * @return {@code True} if this node is a data node for given cache. */ public boolean dataNode(ClusterNode node) { return !node.isDaemon() && cacheFilter.apply(node); } /** * @param node Node to check. * @return {@code True} if cache is accessible on the given node. */ public boolean cacheNode(ClusterNode node) { return !node.isClient() && !node.isDaemon() && (cacheFilter.apply(node) || clientNodes.containsKey(node.id())); } /** * @param node Node to check. 
* @return {@code True} if near cache is present on the given nodes. */ public boolean nearNode(ClusterNode node) { if (node.isDaemon()) return false; if (nearEnabled && cacheFilter.apply(node)) return true; Boolean near = clientNodes.get(node.id()); return near != null && near; } /** * @param node Node to check. * @return {@code True} if client cache is present on the given nodes. */ public boolean clientNode(ClusterNode node) { if (node.isDaemon()) return false; Boolean near = clientNodes.get(node.id()); return near != null && !near; } } }
ignite-754: implemented
modules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java
ignite-754: implemented
<ide><path>odules/core/src/main/java/org/apache/ignite/internal/managers/discovery/GridDiscoveryManager.java <ide> <ide> Object locMode = locNode.attribute(ATTR_DEPLOYMENT_MODE); <ide> <add> int locJvmMajVer = nodeJavaMajorVer(locNode); <add> <ide> boolean locP2pEnabled = locNode.attribute(ATTR_PEER_CLASSLOADING); <ide> <ide> boolean warned = false; <ide> <ide> for (ClusterNode n : nodes) { <add> int rmtJvmMajVer = nodeJavaMajorVer(n); <add> <add> if (locJvmMajVer != rmtJvmMajVer) <add> throw new IgniteCheckedException("Local node's java major version = " + locJvmMajVer + <add> " is different from remote node's one = " + rmtJvmMajVer); <add> <ide> String rmtPreferIpV4 = n.attribute("java.net.preferIPv4Stack"); <ide> <ide> if (!F.eq(rmtPreferIpV4, locPreferIpV4)) { <ide> log.debug("Finished node attributes consistency check."); <ide> } <ide> <add> private int nodeJavaMajorVer(ClusterNode node) throws IgniteCheckedException { <add> try { <add> return Integer.parseInt(node.<String>attribute("java.version").split(".")[1]); <add> } <add> catch (Exception e) { <add> throw new IgniteCheckedException("Failed to get java major version with reason: " + e.getMessage()); <add> } <add> } <ide> /** <ide> * @param nodes Nodes. <ide> * @return Total CPUs.
Java
bsd-3-clause
8fde256505bb97e270313b30c3eedeb4621408ab
0
gamma9mu/SOMa
package cs437.som.neighborhood; import cs437.som.NeighborhoodWidthFunction; import cs437.som.SOMError; import cs437.som.util.Reflector; import java.io.BufferedReader; import java.io.IOException; import java.util.Map; import java.util.TreeMap; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Collects a series of neighborhood width functions to be used sequentially in * training a SOM. This allows a SOM to be trained with a large, linearly * decreasing neighborhood width initially and, afterwards, the width function * can change to a exponentially decreasing function. Any number of * combinations should be possible. * * Using neighborhood width functions in a CompoundNeighborhood is an exception * to the instructions given in the NeighborhoodWidthFunction interface * documentation: * * The NeighborhoodWidthFunction object should have its expected iterations * property set before it is added to a CompoundNeighborhood object. * * Once a NeighborhoodWidthFunction has been added to a CompoundNeighborhood * object, its ownership is passed to the CompoundNeighborhood object ad should * not be modified by the user afterward. 
*/ public class CompoundNeighborhood implements NeighborhoodWidthFunction { private int nextTransition = -1; private int expectedIterations = 0; private NeighborhoodWidthFunction currentFunction = null; private final Map<Integer, NeighborhoodWidthFunction> widthFunctions = new TreeMap<Integer, NeighborhoodWidthFunction>(); public CompoundNeighborhood() { } public CompoundNeighborhood(NeighborhoodWidthFunction initialWidthFuncton) { widthFunctions.put(0, initialWidthFuncton); currentFunction = initialWidthFuncton; } public void setExpectedIterations(int expectedIterations) { if (nextTransition == -1) { nextTransition = expectedIterations; this.expectedIterations = expectedIterations; } } public double neighborhoodWidth(int iteration) { if (iteration == nextTransition) { shiftFunctions(); } return currentFunction.neighborhoodWidth(iteration); } /** * Add a neighborhood function to be used after a specific number of * iterations. * * @param neighborhood The neighborhood function object to add. * @param startAt The iteration at which to use {@code neighborhood}. */ public void addNeighborhood(NeighborhoodWidthFunction neighborhood, int startAt) { widthFunctions.put(startAt, neighborhood); nextTransition = findLowest(0); } /** * Find the next neighborhood width function shift it into the current * slot. */ private void shiftFunctions() { // Don't try to shift if there's nothing left to use. if (widthFunctions.isEmpty()) { return; } // Find the next lowest transition point int low = findLowest(nextTransition); // Move the next function into place. currentFunction = widthFunctions.get(low); nextTransition = low; } /** * Find the child neighborhood function with the next lowest starting * point. * * @param afterWhere The point after which to accept starting points. * @return The index into {@code widthFunctions} of the matching * neighborhood function. 
*/ private int findLowest(int afterWhere) { int match = expectedIterations; for (Integer i : widthFunctions.keySet()) { if (i < match && i > afterWhere) { match = i; } } return match; } @Override public String toString() { StringBuilder sb = new StringBuilder("CompoundNeighborhood begin"); for (Map.Entry<Integer, NeighborhoodWidthFunction> next : widthFunctions.entrySet()) { sb.append(String.format("%d %s", next.getKey(), next.getValue())); } sb.append(String.format("%n end")); return sb.toString(); } /** * Load a CompoundNeighborhood from a stream reader. * * @param reader The stream to read from. * @return A CompoundNeighborhood read from {@code reader}. * @throws IOException if an I/O error occurs. */ public static NeighborhoodWidthFunction parse(BufferedReader reader) throws IOException { Pattern neighborhhodRegEx = Pattern.compile("(\\d*)\\s*(\\w*)\\s*(.*)"); CompoundNeighborhood cnw = new CompoundNeighborhood(); String line = reader.readLine(); while (line.compareToIgnoreCase("end") != 0) { Matcher nwMatch = neighborhhodRegEx.matcher(line); if (!nwMatch.matches()) { throw new SOMError("Bad input while parsing neighborhood " + "functions: " + line); } NeighborhoodWidthFunction nw = (NeighborhoodWidthFunction) Reflector.instantiateFromString("cs437.som.neighborhood", nwMatch.group(1), nwMatch.group(2)); int startsAt = Integer.parseInt(nwMatch.group(1)); cnw.addNeighborhood(nw, startsAt); } return cnw; } }
src/cs437/som/neighborhood/CompoundNeighborhood.java
package cs437.som.neighborhood; import cs437.som.NeighborhoodWidthFunction; import cs437.som.SOMError; import cs437.som.util.Reflector; import java.io.BufferedReader; import java.io.IOException; import java.util.Map; import java.util.TreeMap; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Collects a series of neighborhood width functions to be used sequentially in * training a SOM. This allows a SOM to be trained with a large, linearly * decreasing neighborhood width initially and, afterwards, the width function * can change to a exponentially decreasing function. Any number of * combinations should be possible. * * Using neighborhood width functions in a CompoundNeighborhood is an exception * to the instructions given in the NeighborhoodWidthFunction interface * documentation: * * The NeighborhoodWidthFunction object should have its expected iterations * property set before it is added to a CompoundNeighborhood object. * * Once a NeighborhoodWidthFunction has been added to a CompoundNeighborhood * object, its ownership is passed to the CompoundNeighborhood object ad should * not be modified by the user afterward. 
*/ public class CompoundNeighborhood implements NeighborhoodWidthFunction { private int nextTransition = -1; private int expectedIterations = 0; private NeighborhoodWidthFunction currentFunction = null; private final Map<Integer, NeighborhoodWidthFunction> widthFunctions = new TreeMap<Integer, NeighborhoodWidthFunction>(); public CompoundNeighborhood() { } public CompoundNeighborhood(NeighborhoodWidthFunction initialWidthFuncton) { widthFunctions.put(0, initialWidthFuncton); currentFunction = initialWidthFuncton; } public void setExpectedIterations(int expectedIterations) { if (nextTransition == -1) { nextTransition = expectedIterations; this.expectedIterations = expectedIterations; } } public double neighborhoodWidth(int iteration) { if (iteration == nextTransition) { shiftFunctions(); } return currentFunction.neighborhoodWidth(iteration); } /** * Add a neighborhood function to be used after a specific number of * iterations. * * @param neighborhood The neighborhood function object to add. * @param startAt The iteration at which to use {@code neighborhood}. */ public void addNeighborhood(NeighborhoodWidthFunction neighborhood, int startAt) { widthFunctions.put(startAt, neighborhood); nextTransition = findLowest(0); } /** * Find the next neighborhood width function shift it into the current * slot. */ private void shiftFunctions() { // Don't try to shift if there's nothing left to use. if (widthFunctions.isEmpty()) { return; } // Find the next lowest transition point int low = findLowest(nextTransition); // Move the next function into place. currentFunction = widthFunctions.get(low); nextTransition = low; } /** * Find the child neighborhood function with the next lowest starting * point. * * @param afterWhere The point after which to accept starting points. * @return The index into {@code widthFunctions} of the matching * neighborhood function. 
*/ private int findLowest(int afterWhere) { int match = expectedIterations; for (Integer i : widthFunctions.keySet()) { if (i < match && i > afterWhere) { match = i; } } return match; } @Override public String toString() { StringBuilder sb = new StringBuilder("CompoundNeighborhood begin"); for (Map.Entry<Integer, NeighborhoodWidthFunction> next : widthFunctions.entrySet()) { sb.append(String.format("%d %s", next.getKey(), next.getValue())); } sb.append(String.format("%n end")); return sb.toString(); } public static NeighborhoodWidthFunction parse(BufferedReader reader) throws IOException { Pattern neighborhhodRegEx = Pattern.compile("(\\d*)\\s*(\\w*)\\s*(.*)"); CompoundNeighborhood cnw = new CompoundNeighborhood(); String line = reader.readLine(); while (line.compareToIgnoreCase("end") != 0) { Matcher nwMatch = neighborhhodRegEx.matcher(line); if (!nwMatch.matches()) { throw new SOMError("Bad input while parsing neighborhood " + "functions: " + line); } NeighborhoodWidthFunction nw = (NeighborhoodWidthFunction) Reflector.instantiateFromString("cs437.som.neighborhood", nwMatch.group(1), nwMatch.group(2)); int startsAt = Integer.parseInt(nwMatch.group(1)); cnw.addNeighborhood(nw, startsAt); } return cnw; } }
Document CompoundNeighborhood's stream loading.
src/cs437/som/neighborhood/CompoundNeighborhood.java
Document CompoundNeighborhood's stream loading.
<ide><path>rc/cs437/som/neighborhood/CompoundNeighborhood.java <ide> return sb.toString(); <ide> } <ide> <add> /** <add> * Load a CompoundNeighborhood from a stream reader. <add> * <add> * @param reader The stream to read from. <add> * @return A CompoundNeighborhood read from {@code reader}. <add> * @throws IOException if an I/O error occurs. <add> */ <ide> public static NeighborhoodWidthFunction parse(BufferedReader reader) <ide> throws IOException { <ide> Pattern neighborhhodRegEx = Pattern.compile("(\\d*)\\s*(\\w*)\\s*(.*)");
Java
apache-2.0
7a7fe6e98d0acc61007b1dcc327984c6af570a3f
0
flyway/flyway,flyway/flyway
/* * Copyright 2010-2017 Boxfuse GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flywaydb.core.internal.util.scanner.classpath.android; import android.content.Context; import dalvik.system.DexFile; import org.flywaydb.core.api.FlywayException; import org.flywaydb.core.api.android.ContextHolder; import org.flywaydb.core.api.logging.Log; import org.flywaydb.core.api.logging.LogFactory; import org.flywaydb.core.internal.util.ClassUtils; import org.flywaydb.core.internal.util.Location; import org.flywaydb.core.internal.util.scanner.LoadableResource; import org.flywaydb.core.internal.util.scanner.classpath.ResourceAndClassScanner; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; /** * Class & resource scanner for Android. */ public class AndroidScanner implements ResourceAndClassScanner { private static final Log LOG = LogFactory.getLog(AndroidScanner.class); private final Context context; private final ClassLoader classLoader; public AndroidScanner(ClassLoader classLoader) { this.classLoader = classLoader; context = ContextHolder.getContext(); if (context == null) { throw new FlywayException("Unable to scan for Migrations! Context not set. 
" + "Within an activity you can fix this with org.flywaydb.core.api.android.ContextHolder.setContext(this);"); } } public LoadableResource[] scanForResources(Location location, String prefix, String suffix) throws Exception { List<LoadableResource> resources = new ArrayList<LoadableResource>(); String path = location.getPath(); for (String asset : context.getAssets().list(path)) { if (asset.startsWith(prefix) && asset.endsWith(suffix) && (asset.length() > (prefix + suffix).length())) { resources.add(new AndroidResource(context.getAssets(), path, asset)); } else { LOG.debug("Filtering out asset: " + asset); } } return resources.toArray(new LoadableResource[resources.size()]); } public Class<?>[] scanForClasses(Location location, Class<?> implementedInterface) throws Exception { String pkg = location.getPath().replace("/", "."); List<Class> classes = new ArrayList<Class>(); DexFile dex = null; try { dex = new DexFile(context.getApplicationInfo().sourceDir); Enumeration<String> entries = dex.entries(); while (entries.hasMoreElements()) { String className = entries.nextElement(); if (className.startsWith(pkg)) { Class<?> clazz = classLoader.loadClass(className); if (Modifier.isAbstract(clazz.getModifiers())) { LOG.debug("Skipping abstract class: " + className); continue; } if (!implementedInterface.isAssignableFrom(clazz)) { continue; } ClassUtils.instantiate(className, classLoader); classes.add(clazz); LOG.debug("Found class: " + className); } } } finally { if (dex != null) { dex.close(); } return classes.toArray(new Class<?>[classes.size()]); } } }
flyway-core/src/main/java/org/flywaydb/core/internal/util/scanner/classpath/android/AndroidScanner.java
/* * Copyright 2010-2017 Boxfuse GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flywaydb.core.internal.util.scanner.classpath.android; import android.content.Context; import dalvik.system.DexFile; import org.flywaydb.core.api.FlywayException; import org.flywaydb.core.api.android.ContextHolder; import org.flywaydb.core.api.logging.Log; import org.flywaydb.core.api.logging.LogFactory; import org.flywaydb.core.internal.util.ClassUtils; import org.flywaydb.core.internal.util.Location; import org.flywaydb.core.internal.util.scanner.LoadableResource; import org.flywaydb.core.internal.util.scanner.classpath.ResourceAndClassScanner; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; /** * Class & resource scanner for Android. */ public class AndroidScanner implements ResourceAndClassScanner { private static final Log LOG = LogFactory.getLog(AndroidScanner.class); private final Context context; private final ClassLoader classLoader; public AndroidScanner(ClassLoader classLoader) { this.classLoader = classLoader; context = ContextHolder.getContext(); if (context == null) { throw new FlywayException("Unable to scan for Migrations! Context not set. 
" + "Within an activity you can fix this with org.flywaydb.core.api.android.ContextHolder.setContext(this);"); } } public LoadableResource[] scanForResources(Location location, String prefix, String suffix) throws Exception { List<LoadableResource> resources = new ArrayList<LoadableResource>(); String path = location.getPath(); for (String asset : context.getAssets().list(path)) { if (asset.startsWith(prefix) && asset.endsWith(suffix) && (asset.length() > (prefix + suffix).length())) { resources.add(new AndroidResource(context.getAssets(), path, asset)); } else { LOG.debug("Filtering out asset: " + asset); } } return resources.toArray(new LoadableResource[resources.size()]); } public Class<?>[] scanForClasses(Location location, Class<?> implementedInterface) throws Exception { String pkg = location.getPath().replace("/", "."); List<Class> classes = new ArrayList<Class>(); DexFile dex = new DexFile(context.getApplicationInfo().sourceDir); Enumeration<String> entries = dex.entries(); while (entries.hasMoreElements()) { String className = entries.nextElement(); if (className.startsWith(pkg)) { Class<?> clazz = classLoader.loadClass(className); if (Modifier.isAbstract(clazz.getModifiers())) { LOG.debug("Skipping abstract class: " + className); continue; } if (!implementedInterface.isAssignableFrom(clazz)) { continue; } ClassUtils.instantiate(className, classLoader); classes.add(clazz); LOG.debug("Found class: " + className); } } return classes.toArray(new Class<?>[classes.size()]); } }
Fix on Android platform DexFile resource leak, closing the DexFile instance
flyway-core/src/main/java/org/flywaydb/core/internal/util/scanner/classpath/android/AndroidScanner.java
Fix on Android platform DexFile resource leak, closing the DexFile instance
<ide><path>lyway-core/src/main/java/org/flywaydb/core/internal/util/scanner/classpath/android/AndroidScanner.java <ide> <ide> List<Class> classes = new ArrayList<Class>(); <ide> <del> DexFile dex = new DexFile(context.getApplicationInfo().sourceDir); <del> Enumeration<String> entries = dex.entries(); <del> while (entries.hasMoreElements()) { <del> String className = entries.nextElement(); <del> if (className.startsWith(pkg)) { <del> Class<?> clazz = classLoader.loadClass(className); <del> if (Modifier.isAbstract(clazz.getModifiers())) { <del> LOG.debug("Skipping abstract class: " + className); <del> continue; <add> DexFile dex = null; <add> <add> try { <add> dex = new DexFile(context.getApplicationInfo().sourceDir); <add> Enumeration<String> entries = dex.entries(); <add> while (entries.hasMoreElements()) { <add> String className = entries.nextElement(); <add> if (className.startsWith(pkg)) { <add> Class<?> clazz = classLoader.loadClass(className); <add> if (Modifier.isAbstract(clazz.getModifiers())) { <add> LOG.debug("Skipping abstract class: " + className); <add> continue; <add> } <add> <add> if (!implementedInterface.isAssignableFrom(clazz)) { <add> continue; <add> } <add> <add> ClassUtils.instantiate(className, classLoader); <add> <add> classes.add(clazz); <add> LOG.debug("Found class: " + className); <ide> } <del> <del> if (!implementedInterface.isAssignableFrom(clazz)) { <del> continue; <del> } <del> <del> ClassUtils.instantiate(className, classLoader); <del> <del> classes.add(clazz); <del> LOG.debug("Found class: " + className); <ide> } <add> } finally { <add> if (dex != null) { <add> dex.close(); <add> } <add> return classes.toArray(new Class<?>[classes.size()]); <ide> } <del> return classes.toArray(new Class<?>[classes.size()]); <ide> } <ide> }
Java
apache-2.0
869d4b888c806304cf965b32df89c18910daacf4
0
Reidddddd/mo-alluxio,maboelhassan/alluxio,calvinjia/tachyon,calvinjia/tachyon,calvinjia/tachyon,Alluxio/alluxio,PasaLab/tachyon,calvinjia/tachyon,yuluo-ding/alluxio,wwjiang007/alluxio,apc999/alluxio,PasaLab/tachyon,PasaLab/tachyon,riversand963/alluxio,aaudiber/alluxio,maboelhassan/alluxio,maobaolong/alluxio,jsimsa/alluxio,Reidddddd/mo-alluxio,apc999/alluxio,riversand963/alluxio,jsimsa/alluxio,Reidddddd/alluxio,aaudiber/alluxio,WilliamZapata/alluxio,Reidddddd/mo-alluxio,yuluo-ding/alluxio,PasaLab/tachyon,jsimsa/alluxio,Alluxio/alluxio,maobaolong/alluxio,maobaolong/alluxio,WilliamZapata/alluxio,jswudi/alluxio,Reidddddd/alluxio,uronce-cc/alluxio,Reidddddd/mo-alluxio,ChangerYoung/alluxio,jswudi/alluxio,ChangerYoung/alluxio,apc999/alluxio,maobaolong/alluxio,ChangerYoung/alluxio,jsimsa/alluxio,wwjiang007/alluxio,EvilMcJerkface/alluxio,bf8086/alluxio,bf8086/alluxio,madanadit/alluxio,apc999/alluxio,jswudi/alluxio,ChangerYoung/alluxio,wwjiang007/alluxio,uronce-cc/alluxio,bf8086/alluxio,maboelhassan/alluxio,EvilMcJerkface/alluxio,aaudiber/alluxio,madanadit/alluxio,wwjiang007/alluxio,madanadit/alluxio,WilliamZapata/alluxio,uronce-cc/alluxio,madanadit/alluxio,Reidddddd/alluxio,ShailShah/alluxio,jswudi/alluxio,Alluxio/alluxio,bf8086/alluxio,maobaolong/alluxio,calvinjia/tachyon,Reidddddd/alluxio,Alluxio/alluxio,maobaolong/alluxio,bf8086/alluxio,madanadit/alluxio,Alluxio/alluxio,bf8086/alluxio,ShailShah/alluxio,aaudiber/alluxio,bf8086/alluxio,ChangerYoung/alluxio,yuluo-ding/alluxio,apc999/alluxio,aaudiber/alluxio,jsimsa/alluxio,calvinjia/tachyon,madanadit/alluxio,EvilMcJerkface/alluxio,maboelhassan/alluxio,Reidddddd/alluxio,WilliamZapata/alluxio,Reidddddd/mo-alluxio,wwjiang007/alluxio,Reidddddd/mo-alluxio,riversand963/alluxio,WilliamZapata/alluxio,EvilMcJerkface/alluxio,aaudiber/alluxio,EvilMcJerkface/alluxio,uronce-cc/alluxio,ShailShah/alluxio,PasaLab/tachyon,yuluo-ding/alluxio,ShailShah/alluxio,jswudi/alluxio,maobaolong/alluxio,Alluxio/alluxio,riversand963/alluxio,yuluo-ding/all
uxio,Alluxio/alluxio,jsimsa/alluxio,Alluxio/alluxio,yuluo-ding/alluxio,PasaLab/tachyon,wwjiang007/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,EvilMcJerkface/alluxio,uronce-cc/alluxio,maboelhassan/alluxio,ShailShah/alluxio,Alluxio/alluxio,madanadit/alluxio,jswudi/alluxio,maboelhassan/alluxio,wwjiang007/alluxio,maobaolong/alluxio,maboelhassan/alluxio,PasaLab/tachyon,aaudiber/alluxio,WilliamZapata/alluxio,apc999/alluxio,wwjiang007/alluxio,calvinjia/tachyon,wwjiang007/alluxio,EvilMcJerkface/alluxio,bf8086/alluxio,uronce-cc/alluxio,maobaolong/alluxio,riversand963/alluxio,ShailShah/alluxio,madanadit/alluxio,Reidddddd/alluxio,apc999/alluxio,riversand963/alluxio,calvinjia/tachyon,wwjiang007/alluxio,ChangerYoung/alluxio,Alluxio/alluxio,Reidddddd/alluxio
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio; import alluxio.exception.AlluxioException; import alluxio.retry.CountingRetry; import alluxio.thrift.AlluxioService; import alluxio.thrift.AlluxioTException; import alluxio.thrift.ThriftIOException; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import org.apache.thrift.TException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; /** * The base class for clients that use {@link alluxio.network.connection.ThriftClientPool}. * * @param <C> the Alluxio service type */ public abstract class AbstractThriftClient<C extends AlluxioService.Client> { private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE); private static final int RPC_MAX_NUM_RETRY = 30; /** * If the implementation of this function guarantees that the client returned will not * be returned to any other caller. Then this whole class is threadsafe. * * @return a Thrift service client */ protected abstract C acquireClient() throws IOException; /** * @param client the client to release */ protected abstract void releaseClient(C client); /** * The RPC to be executed in {@link #retryRPC(RpcCallable)}. * * @param <V> the return value of {@link #call(AlluxioService.Client)} * @param <C> the Alluxio service type */ protected interface RpcCallable<V, C extends AlluxioService.Client> { /** * The task where RPC happens. 
* * @return RPC result * @throws TException when any exception defined in thrift happens */ V call(C client) throws TException; } /** * Same with {@link RpcCallable} except that this RPC call throws {@link AlluxioTException} and * is to be executed in {@link #retryRPC(RpcCallableThrowsAlluxioTException)}. * * @param <V> the return value of {@link #call(AlluxioService.Client)} * @param <C> the Alluxio service type */ protected interface RpcCallableThrowsAlluxioTException<V, C extends AlluxioService.Client> { /** * The task where RPC happens. * * @return RPC result * @throws AlluxioTException when any {@link AlluxioException} happens during RPC and is wrapped * into {@link AlluxioTException} * @throws TException when any exception defined in thrift happens */ V call(C client) throws AlluxioTException, TException; } /** * Tries to execute an RPC defined as a {@link RpcCallable}. * * @param rpc the RPC call to be executed * @param <V> type of return value of the RPC call * @return the return value of the RPC call * @throws IOException when retries exceeds {@link #RPC_MAX_NUM_RETRY} or some server * side IOException occurred. 
*/ protected <V> V retryRPC(RpcCallable<V, C> rpc) throws IOException { TException exception = null; CountingRetry retryPolicy = new CountingRetry(RPC_MAX_NUM_RETRY); do { C client = acquireClient(); try { return rpc.call(client); } catch (ThriftIOException e) { throw new IOException(e); } catch (AlluxioTException e) { throw Throwables.propagate(AlluxioException.fromThrift(e)); } catch (TException e) { LOG.error(e.getMessage(), e); closeClient(client); exception = e; } finally { releaseClient(client); } } while (retryPolicy.attemptRetry()); LOG.error("Failed after " + retryPolicy.getRetryCount() + " retries."); Preconditions.checkNotNull(exception); throw new IOException(exception); } /** * Similar to {@link #retryRPC(RpcCallable)} except that the RPC call may throw * {@link AlluxioTException} and once it is thrown, it will be transformed into * {@link AlluxioException} and be thrown. * * @param rpc the RPC call to be executed * @param <V> type of return value of the RPC call * @return the return value of the RPC call * @throws AlluxioException when {@link AlluxioTException} is thrown by the RPC call * @throws IOException when retries exceeds {@link #RPC_MAX_NUM_RETRY} or some server * side IOException occurred. */ protected <V> V retryRPC(RpcCallableThrowsAlluxioTException<V, C> rpc) throws AlluxioException, IOException { TException exception = null; CountingRetry retryPolicy = new CountingRetry(RPC_MAX_NUM_RETRY); do { C client = acquireClient(); try { return rpc.call(client); } catch (AlluxioTException e) { throw AlluxioException.fromThrift(e); } catch (ThriftIOException e) { throw new IOException(e); } catch (TException e) { LOG.error(e.getMessage(), e); closeClient(client); exception = e; } finally { releaseClient(client); } } while (retryPolicy.attemptRetry()); LOG.error("Failed after " + retryPolicy.getRetryCount() + " retries."); Preconditions.checkNotNull(exception); throw new IOException(exception); } /** * Close the given client. 
* * @param client the client to close */ private void closeClient(C client) { client.getOutputProtocol().getTransport().close(); } }
core/common/src/main/java/alluxio/AbstractThriftClient.java
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio; import alluxio.exception.AlluxioException; import alluxio.retry.CountingRetry; import alluxio.thrift.AlluxioService; import alluxio.thrift.AlluxioTException; import alluxio.thrift.ThriftIOException; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import org.apache.thrift.TException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import javax.annotation.concurrent.ThreadSafe; /** * The base class for clients that use {@link alluxio.network.connection.ThriftClientPool}. * * @param <C> the Alluxio service type */ public abstract class AbstractThriftClient<C extends AlluxioService.Client> { private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE); private static final int RPC_MAX_NUM_RETRY = 30; /** * If the implementation of this function guarantees that the client returned will not * be returned to any other caller. Then this whole class is threadsafe. * * @return a Thrift service client */ protected abstract C acquireClient() throws IOException; /** * @param client the client to release */ protected abstract void releaseClient(C client); /** * The RPC to be executed in {@link #retryRPC(RpcCallable)}. * * @param <V> the return value of {@link #call(AlluxioService.Client)} * @param <C> the Alluxio service type */ protected interface RpcCallable<V, C extends AlluxioService.Client> { /** * The task where RPC happens. 
* * @return RPC result * @throws TException when any exception defined in thrift happens */ V call(C client) throws TException; } /** * Same with {@link RpcCallable} except that this RPC call throws {@link AlluxioTException} and * is to be executed in {@link #retryRPC(RpcCallableThrowsAlluxioTException)}. * * @param <V> the return value of {@link #call(AlluxioService.Client)} * @param <C> the Alluxio service type */ protected interface RpcCallableThrowsAlluxioTException<V, C extends AlluxioService.Client> { /** * The task where RPC happens. * * @return RPC result * @throws AlluxioTException when any {@link AlluxioException} happens during RPC and is wrapped * into {@link AlluxioTException} * @throws TException when any exception defined in thrift happens */ V call(C client) throws AlluxioTException, TException; } /** * Tries to execute an RPC defined as a {@link RpcCallable}. * * @param rpc the RPC call to be executed * @param <V> type of return value of the RPC call * @return the return value of the RPC call * @throws IOException when retries exceeds {@link #RPC_MAX_NUM_RETRY} or some server * side IOException occurred. 
*/ protected <V> V retryRPC(RpcCallable<V, C> rpc) throws IOException { TException exception = null; CountingRetry retryPolicy = new CountingRetry(RPC_MAX_NUM_RETRY); do { C client = acquireClient(); try { return rpc.call(client); } catch (ThriftIOException e) { throw new IOException(e); } catch (AlluxioTException e) { throw Throwables.propagate(AlluxioException.fromThrift(e)); } catch (TException e) { LOG.error(e.getMessage(), e); closeClient(client); exception = e; } finally { releaseClient(client); } } while (retryPolicy.attemptRetry()); LOG.error("Failed after " + retryPolicy.getRetryCount() + " retries."); Preconditions.checkNotNull(exception); throw new IOException(exception); } /** * Similar to {@link #retryRPC(RpcCallable)} except that the RPC call may throw * {@link AlluxioTException} and once it is thrown, it will be transformed into * {@link AlluxioException} and be thrown. * * @param rpc the RPC call to be executed * @param <V> type of return value of the RPC call * @return the return value of the RPC call * @throws AlluxioException when {@link AlluxioTException} is thrown by the RPC call * @throws IOException when retries exceeds {@link #RPC_MAX_NUM_RETRY} or some server * side IOException occurred. */ protected <V> V retryRPC(RpcCallableThrowsAlluxioTException<V, C> rpc) throws AlluxioException, IOException { TException exception = null; CountingRetry retryPolicy = new CountingRetry(RPC_MAX_NUM_RETRY); do { C client = acquireClient(); try { return rpc.call(client); } catch (AlluxioTException e) { throw AlluxioException.fromThrift(e); } catch (ThriftIOException e) { throw new IOException(e); } catch (TException e) { LOG.error(e.getMessage(), e); closeClient(client); exception = e; } finally { releaseClient(client); } } while (retryPolicy.attemptRetry()); LOG.error("Failed after " + retryPolicy.getRetryCount() + " retries."); Preconditions.checkNotNull(exception); throw new IOException(exception); } /** * Close the given client. 
* * @param client the client to close */ private void closeClient(C client) { client.getOutputProtocol().getTransport().close(); } }
style fix
core/common/src/main/java/alluxio/AbstractThriftClient.java
style fix
<ide><path>ore/common/src/main/java/alluxio/AbstractThriftClient.java <ide> import org.slf4j.LoggerFactory; <ide> <ide> import java.io.IOException; <del> <del>import javax.annotation.concurrent.ThreadSafe; <ide> <ide> /** <ide> * The base class for clients that use {@link alluxio.network.connection.ThriftClientPool}.
Java
mit
error: pathspec 'Application.java' did not match any file(s) known to git
bec0a88b9a31bd61de099100255141c48f3008fa
1
msshah/Iconification
public class Application { // dc public Application() { Iconification iconification = new Iconification(); } // main public static void main(String[] args) { Application app = new Application(); } // end of main } // end of class
Application.java
Added files via upload
Application.java
Added files via upload
<ide><path>pplication.java <add>public class Application { <add> <add> // dc <add> public Application() { <add> Iconification iconification = new Iconification(); <add> } <add> // main <add> public static void main(String[] args) { <add> Application app = new Application(); <add> } // end of main <add>} // end of class
Java
apache-2.0
23c05699c2c415e8dfc2bd47a5861c1b16d376a7
0
kuujo/onos,LorenzReinhart/ONOSnew,ravikumaran2015/ravikumaran201504,maheshraju-Huawei/actn,donNewtonAlpha/onos,oplinkoms/onos,mengmoya/onos,planoAccess/clonedONOS,castroflavio/onos,CNlukai/onos-gerrit-test,jmiserez/onos,castroflavio/onos,opennetworkinglab/onos,VinodKumarS-Huawei/ietf96yang,oplinkoms/onos,sonu283304/onos,gkatsikas/onos,y-higuchi/onos,oeeagle/onos,kuangrewawa/onos,oplinkoms/onos,chenxiuyang/onos,rvhub/onos,maheshraju-Huawei/actn,Shashikanth-Huawei/bmp,planoAccess/clonedONOS,rvhub/onos,SmartInfrastructures/dreamer,y-higuchi/onos,chinghanyu/onos,kuujo/onos,donNewtonAlpha/onos,planoAccess/clonedONOS,rvhub/onos,jmiserez/onos,LorenzReinhart/ONOSnew,zsh2938/onos,castroflavio/onos,gkatsikas/onos,CNlukai/onos-gerrit-test,SmartInfrastructures/dreamer,y-higuchi/onos,jinlongliu/onos,oplinkoms/onos,kkkane/ONOS,opennetworkinglab/onos,Shashikanth-Huawei/bmp,chenxiuyang/onos,osinstom/onos,chenxiuyang/onos,planoAccess/clonedONOS,opennetworkinglab/onos,sdnwiselab/onos,VinodKumarS-Huawei/ietf96yang,jmiserez/onos,VinodKumarS-Huawei/ietf96yang,lsinfo3/onos,maheshraju-Huawei/actn,SmartInfrastructures/dreamer,oeeagle/onos,castroflavio/onos,sdnwiselab/onos,Shashikanth-Huawei/bmp,gkatsikas/onos,kuujo/onos,y-higuchi/onos,kuujo/onos,CNlukai/onos-gerrit-test,opennetworkinglab/onos,kkkane/ONOS,sdnwiselab/onos,Shashikanth-Huawei/bmp,chenxiuyang/onos,lsinfo3/onos,donNewtonAlpha/onos,zsh2938/onos,kkkane/ONOS,jinlongliu/onos,packet-tracker/onos,gkatsikas/onos,packet-tracker/onos,zsh2938/onos,mengmoya/onos,zsh2938/onos,LorenzReinhart/ONOSnew,donNewtonAlpha/onos,VinodKumarS-Huawei/ietf96yang,osinstom/onos,jmiserez/onos,jinlongliu/onos,oplinkoms/onos,osinstom/onos,chinghanyu/onos,maheshraju-Huawei/actn,opennetworkinglab/onos,mengmoya/onos,ravikumaran2015/ravikumaran201504,oeeagle/onos,maheshraju-Huawei/actn,sdnwiselab/onos,kuujo/onos,y-higuchi/onos,jinlongliu/onos,mengmoya/onos,donNewtonAlpha/onos,mengmoya/onos,sonu283304/onos,oplinkoms/onos,sdnwiselab/onos,sonu283304/onos,kkkane/ONOS,
packet-tracker/onos,gkatsikas/onos,packet-tracker/onos,kuujo/onos,CNlukai/onos-gerrit-test,Shashikanth-Huawei/bmp,gkatsikas/onos,lsinfo3/onos,kuujo/onos,kuangrewawa/onos,kuangrewawa/onos,osinstom/onos,LorenzReinhart/ONOSnew,osinstom/onos,LorenzReinhart/ONOSnew,sdnwiselab/onos,oplinkoms/onos,chinghanyu/onos,chinghanyu/onos,lsinfo3/onos,ravikumaran2015/ravikumaran201504,VinodKumarS-Huawei/ietf96yang,rvhub/onos,sonu283304/onos,opennetworkinglab/onos,oeeagle/onos
/* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onlab.onos.sdnip.bgp; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.junit.Assert.assertThat; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFactory; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.ChannelPipelineFactory; import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.onlab.junit.TestUtils; import org.onlab.junit.TestUtils.TestUtilsException; import org.onlab.onos.sdnip.RouteListener; import org.onlab.onos.sdnip.RouteUpdate; import org.onlab.packet.Ip4Address; import org.onlab.packet.Ip4Prefix; import com.google.common.net.InetAddresses; /** * Unit tests for the BgpSessionManager class. 
*/ public class BgpSessionManagerTest { private static final Ip4Address IP_LOOPBACK_ID = Ip4Address.valueOf("127.0.0.1"); private static final Ip4Address BGP_PEER1_ID = Ip4Address.valueOf("10.0.0.1"); private static final long DEFAULT_LOCAL_PREF = 10; private static final long DEFAULT_MULTI_EXIT_DISC = 20; // Timeout waiting for a message to be received private static final int MESSAGE_TIMEOUT_MS = 5000; // 5s // The BGP Session Manager to test private BgpSessionManager bgpSessionManager; // Remote Peer state private ClientBootstrap peerBootstrap; private TestBgpPeerChannelHandler peerChannelHandler = new TestBgpPeerChannelHandler(BGP_PEER1_ID, DEFAULT_LOCAL_PREF); private TestBgpPeerFrameDecoder peerFrameDecoder = new TestBgpPeerFrameDecoder(); // The socket that the Remote Peer should connect to private InetSocketAddress connectToSocket; private final DummyRouteListener dummyRouteListener = new DummyRouteListener(); /** * Dummy implementation for the RouteListener interface. */ private class DummyRouteListener implements RouteListener { @Override public void update(Collection<RouteUpdate> routeUpdate) { // Nothing to do } } @Before public void setUp() throws Exception { // // Setup the BGP Session Manager to test, and start listening for BGP // connections. // bgpSessionManager = new BgpSessionManager(dummyRouteListener); // NOTE: We use port 0 to bind on any available port bgpSessionManager.start(0); // Get the port number the BGP Session Manager is listening on Channel serverChannel = TestUtils.getField(bgpSessionManager, "serverChannel"); SocketAddress socketAddress = serverChannel.getLocalAddress(); InetSocketAddress inetSocketAddress = (InetSocketAddress) socketAddress; // // Setup the BGP Peer, i.e., the "remote" BGP router that will // initiate the BGP connection, send BGP UPDATE messages, etc. 
// ChannelFactory channelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(), Executors.newCachedThreadPool()); ChannelPipelineFactory pipelineFactory = new ChannelPipelineFactory() { @Override public ChannelPipeline getPipeline() throws Exception { // Setup the transmitting pipeline ChannelPipeline pipeline = Channels.pipeline(); pipeline.addLast("TestBgpPeerFrameDecoder", peerFrameDecoder); pipeline.addLast("TestBgpPeerChannelHandler", peerChannelHandler); return pipeline; } }; peerBootstrap = new ClientBootstrap(channelFactory); peerBootstrap.setOption("child.keepAlive", true); peerBootstrap.setOption("child.tcpNoDelay", true); peerBootstrap.setPipelineFactory(pipelineFactory); InetAddress connectToAddress = InetAddresses.forString("127.0.0.1"); connectToSocket = new InetSocketAddress(connectToAddress, inetSocketAddress.getPort()); } @After public void tearDown() throws Exception { bgpSessionManager.stop(); bgpSessionManager = null; } /** * Gets BGP RIB-IN routes by waiting until they are received. * <p/> * NOTE: We keep checking once a second the number of received routes, * up to 5 seconds. * * @param bgpSession the BGP session that is expected to receive the * routes * @param expectedRoutes the expected number of routes * @return the BGP RIB-IN routes as received within the expected * time interval */ private Collection<BgpRouteEntry> waitForBgpRibIn(BgpSession bgpSession, long expectedRoutes) throws InterruptedException { Collection<BgpRouteEntry> bgpRibIn = bgpSession.getBgpRibIn(); final int maxChecks = 5; // Max wait of 5 seconds for (int i = 0; i < maxChecks; i++) { if (bgpRibIn.size() == expectedRoutes) { break; } Thread.sleep(1000); bgpRibIn = bgpSession.getBgpRibIn(); } return bgpRibIn; } /** * Gets BGP merged routes by waiting until they are received. * <p/> * NOTE: We keep checking once a second the number of received routes, * up to 5 seconds. 
* * @param expectedRoutes the expected number of routes * @return the BGP Session Manager routes as received within the expected * time interval */ private Collection<BgpRouteEntry> waitForBgpRoutes(long expectedRoutes) throws InterruptedException { Collection<BgpRouteEntry> bgpRoutes = bgpSessionManager.getBgpRoutes(); final int maxChecks = 5; // Max wait of 5 seconds for (int i = 0; i < maxChecks; i++) { if (bgpRoutes.size() == expectedRoutes) { break; } Thread.sleep(1000); bgpRoutes = bgpSessionManager.getBgpRoutes(); } return bgpRoutes; } /** * Tests that the BGP OPEN messages have been exchanged, followed by * KEEPALIVE. * <p> * The BGP Peer opens the sessions and transmits OPEN Message, eventually * followed by KEEPALIVE. The tested BGP listener should respond by * OPEN Message, followed by KEEPALIVE. * * @throws TestUtilsException TestUtils error */ @Test public void testExchangedBgpOpenMessages() throws InterruptedException, TestUtilsException { // Initiate the connection peerBootstrap.connect(connectToSocket); // Wait until the OPEN message is received peerFrameDecoder.receivedOpenMessageLatch.await(MESSAGE_TIMEOUT_MS, TimeUnit.MILLISECONDS); // Wait until the KEEPALIVE message is received peerFrameDecoder.receivedKeepaliveMessageLatch.await(MESSAGE_TIMEOUT_MS, TimeUnit.MILLISECONDS); // // Test the fields from the BGP OPEN message: // BGP version, AS number, BGP ID // assertThat(peerFrameDecoder.remoteBgpVersion, is(BgpConstants.BGP_VERSION)); assertThat(peerFrameDecoder.remoteAs, is(TestBgpPeerChannelHandler.PEER_AS)); assertThat(peerFrameDecoder.remoteBgpIdentifier, is(IP_LOOPBACK_ID)); // // Test that a BgpSession instance has been created // assertThat(bgpSessionManager.getMyBgpId(), is(IP_LOOPBACK_ID)); assertThat(bgpSessionManager.getBgpSessions(), hasSize(1)); BgpSession bgpSession = bgpSessionManager.getBgpSessions().iterator().next(); assertThat(bgpSession, notNullValue()); long sessionAs = TestUtils.getField(bgpSession, "localAs"); 
assertThat(sessionAs, is(TestBgpPeerChannelHandler.PEER_AS)); } /** * Tests that the BGP UPDATE messages have been received and processed. */ @Test public void testProcessedBgpUpdateMessages() throws InterruptedException { BgpSession bgpSession; Ip4Address nextHopRouter; BgpRouteEntry bgpRouteEntry; ChannelBuffer message; Collection<BgpRouteEntry> bgpRibIn; Collection<BgpRouteEntry> bgpRoutes; // Initiate the connection peerBootstrap.connect(connectToSocket); // Wait until the OPEN message is received peerFrameDecoder.receivedOpenMessageLatch.await(MESSAGE_TIMEOUT_MS, TimeUnit.MILLISECONDS); // Wait until the KEEPALIVE message is received peerFrameDecoder.receivedKeepaliveMessageLatch.await(MESSAGE_TIMEOUT_MS, TimeUnit.MILLISECONDS); // Get the BGP Session handler bgpSession = bgpSessionManager.getBgpSessions().iterator().next(); // Prepare routes to add/delete nextHopRouter = Ip4Address.valueOf("10.20.30.40"); Collection<Ip4Prefix> addedRoutes = new LinkedList<>(); Collection<Ip4Prefix> withdrawnRoutes = new LinkedList<>(); addedRoutes.add(Ip4Prefix.valueOf("0.0.0.0/0")); addedRoutes.add(Ip4Prefix.valueOf("20.0.0.0/8")); addedRoutes.add(Ip4Prefix.valueOf("30.0.0.0/16")); addedRoutes.add(Ip4Prefix.valueOf("40.0.0.0/24")); addedRoutes.add(Ip4Prefix.valueOf("50.0.0.0/32")); withdrawnRoutes.add(Ip4Prefix.valueOf("60.0.0.0/8")); withdrawnRoutes.add(Ip4Prefix.valueOf("70.0.0.0/16")); withdrawnRoutes.add(Ip4Prefix.valueOf("80.0.0.0/24")); withdrawnRoutes.add(Ip4Prefix.valueOf("90.0.0.0/32")); // Write the routes message = peerChannelHandler.prepareBgpUpdate(nextHopRouter, addedRoutes, withdrawnRoutes); peerChannelHandler.savedCtx.getChannel().write(message); // Check that the routes have been received, processed and stored bgpRibIn = waitForBgpRibIn(bgpSession, 5); assertThat(bgpRibIn, hasSize(5)); bgpRoutes = waitForBgpRoutes(5); assertThat(bgpRoutes, hasSize(5)); // Setup the AS Path ArrayList<BgpRouteEntry.PathSegment> pathSegments = new ArrayList<>(); byte 
pathSegmentType1 = (byte) BgpConstants.Update.AsPath.AS_SEQUENCE; ArrayList<Long> segmentAsNumbers1 = new ArrayList<>(); segmentAsNumbers1.add((long) 65010); segmentAsNumbers1.add((long) 65020); segmentAsNumbers1.add((long) 65030); BgpRouteEntry.PathSegment pathSegment1 = new BgpRouteEntry.PathSegment(pathSegmentType1, segmentAsNumbers1); pathSegments.add(pathSegment1); // byte pathSegmentType2 = (byte) BgpConstants.Update.AsPath.AS_SET; ArrayList<Long> segmentAsNumbers2 = new ArrayList<>(); segmentAsNumbers2.add((long) 65041); segmentAsNumbers2.add((long) 65042); segmentAsNumbers2.add((long) 65043); BgpRouteEntry.PathSegment pathSegment2 = new BgpRouteEntry.PathSegment(pathSegmentType2, segmentAsNumbers2); pathSegments.add(pathSegment2); // BgpRouteEntry.AsPath asPath = new BgpRouteEntry.AsPath(pathSegments); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("0.0.0.0/0"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("20.0.0.0/8"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("30.0.0.0/16"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("40.0.0.0/24"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("50.0.0.0/32"), nextHopRouter, (byte) 
BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // Delete some routes addedRoutes = new LinkedList<>(); withdrawnRoutes = new LinkedList<>(); withdrawnRoutes.add(Ip4Prefix.valueOf("0.0.0.0/0")); withdrawnRoutes.add(Ip4Prefix.valueOf("50.0.0.0/32")); // Write the routes message = peerChannelHandler.prepareBgpUpdate(nextHopRouter, addedRoutes, withdrawnRoutes); peerChannelHandler.savedCtx.getChannel().write(message); // Check that the routes have been received, processed and stored bgpRibIn = waitForBgpRibIn(bgpSession, 3); assertThat(bgpRibIn, hasSize(3)); bgpRoutes = waitForBgpRoutes(3); assertThat(bgpRoutes, hasSize(3)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("20.0.0.0/8"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("30.0.0.0/16"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("40.0.0.0/24"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // Close the channel and test there are no routes peerChannelHandler.closeChannel(); bgpRoutes = waitForBgpRoutes(0); assertThat(bgpRoutes, hasSize(0)); } }
apps/sdnip/src/test/java/org/onlab/onos/sdnip/bgp/BgpSessionManagerTest.java
/* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onlab.onos.sdnip.bgp; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.junit.Assert.assertThat; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFactory; import org.jboss.netty.channel.ChannelPipeline; import org.jboss.netty.channel.ChannelPipelineFactory; import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.onlab.junit.TestUtils; import org.onlab.junit.TestUtils.TestUtilsException; import org.onlab.onos.sdnip.RouteListener; import org.onlab.onos.sdnip.RouteUpdate; import org.onlab.packet.Ip4Address; import org.onlab.packet.Ip4Prefix; import com.google.common.net.InetAddresses; /** * Unit tests for the BgpSessionManager class. 
*/ public class BgpSessionManagerTest { private static final Ip4Address IP_LOOPBACK_ID = Ip4Address.valueOf("127.0.0.1"); private static final Ip4Address BGP_PEER1_ID = Ip4Address.valueOf("10.0.0.1"); private static final long DEFAULT_LOCAL_PREF = 10; private static final long DEFAULT_MULTI_EXIT_DISC = 20; // The BGP Session Manager to test private BgpSessionManager bgpSessionManager; // Remote Peer state private ClientBootstrap peerBootstrap; private TestBgpPeerChannelHandler peerChannelHandler = new TestBgpPeerChannelHandler(BGP_PEER1_ID, DEFAULT_LOCAL_PREF); private TestBgpPeerFrameDecoder peerFrameDecoder = new TestBgpPeerFrameDecoder(); // The socket that the Remote Peer should connect to private InetSocketAddress connectToSocket; private final DummyRouteListener dummyRouteListener = new DummyRouteListener(); /** * Dummy implementation for the RouteListener interface. */ private class DummyRouteListener implements RouteListener { @Override public void update(Collection<RouteUpdate> routeUpdate) { // Nothing to do } } @Before public void setUp() throws Exception { // // Setup the BGP Session Manager to test, and start listening for BGP // connections. // bgpSessionManager = new BgpSessionManager(dummyRouteListener); // NOTE: We use port 0 to bind on any available port bgpSessionManager.start(0); // Get the port number the BGP Session Manager is listening on Channel serverChannel = TestUtils.getField(bgpSessionManager, "serverChannel"); SocketAddress socketAddress = serverChannel.getLocalAddress(); InetSocketAddress inetSocketAddress = (InetSocketAddress) socketAddress; // // Setup the BGP Peer, i.e., the "remote" BGP router that will // initiate the BGP connection, send BGP UPDATE messages, etc. 
// ChannelFactory channelFactory = new NioClientSocketChannelFactory(Executors.newCachedThreadPool(), Executors.newCachedThreadPool()); ChannelPipelineFactory pipelineFactory = new ChannelPipelineFactory() { @Override public ChannelPipeline getPipeline() throws Exception { // Setup the transmitting pipeline ChannelPipeline pipeline = Channels.pipeline(); pipeline.addLast("TestBgpPeerFrameDecoder", peerFrameDecoder); pipeline.addLast("TestBgpPeerChannelHandler", peerChannelHandler); return pipeline; } }; peerBootstrap = new ClientBootstrap(channelFactory); peerBootstrap.setOption("child.keepAlive", true); peerBootstrap.setOption("child.tcpNoDelay", true); peerBootstrap.setPipelineFactory(pipelineFactory); InetAddress connectToAddress = InetAddresses.forString("127.0.0.1"); connectToSocket = new InetSocketAddress(connectToAddress, inetSocketAddress.getPort()); } @After public void tearDown() throws Exception { bgpSessionManager.stop(); bgpSessionManager = null; } /** * Gets BGP RIB-IN routes by waiting until they are received. * <p/> * NOTE: We keep checking once a second the number of received routes, * up to 5 seconds. * * @param bgpSession the BGP session that is expected to receive the * routes * @param expectedRoutes the expected number of routes * @return the BGP RIB-IN routes as received within the expected * time interval */ private Collection<BgpRouteEntry> waitForBgpRibIn(BgpSession bgpSession, long expectedRoutes) throws InterruptedException { Collection<BgpRouteEntry> bgpRibIn = bgpSession.getBgpRibIn(); final int maxChecks = 5; // Max wait of 5 seconds for (int i = 0; i < maxChecks; i++) { if (bgpRibIn.size() == expectedRoutes) { break; } Thread.sleep(1000); bgpRibIn = bgpSession.getBgpRibIn(); } return bgpRibIn; } /** * Gets BGP merged routes by waiting until they are received. * <p/> * NOTE: We keep checking once a second the number of received routes, * up to 5 seconds. 
* * @param expectedRoutes the expected number of routes * @return the BGP Session Manager routes as received within the expected * time interval */ private Collection<BgpRouteEntry> waitForBgpRoutes(long expectedRoutes) throws InterruptedException { Collection<BgpRouteEntry> bgpRoutes = bgpSessionManager.getBgpRoutes(); final int maxChecks = 5; // Max wait of 5 seconds for (int i = 0; i < maxChecks; i++) { if (bgpRoutes.size() == expectedRoutes) { break; } Thread.sleep(1000); bgpRoutes = bgpSessionManager.getBgpRoutes(); } return bgpRoutes; } /** * Tests that the BGP OPEN messages have been exchanged, followed by * KEEPALIVE. * <p> * The BGP Peer opens the sessions and transmits OPEN Message, eventually * followed by KEEPALIVE. The tested BGP listener should respond by * OPEN Message, followed by KEEPALIVE. * * @throws TestUtilsException TestUtils error */ @Test public void testExchangedBgpOpenMessages() throws InterruptedException, TestUtilsException { // Initiate the connection peerBootstrap.connect(connectToSocket); // Wait until the OPEN message is received peerFrameDecoder.receivedOpenMessageLatch.await(2000, TimeUnit.MILLISECONDS); // Wait until the KEEPALIVE message is received peerFrameDecoder.receivedKeepaliveMessageLatch.await(2000, TimeUnit.MILLISECONDS); // // Test the fields from the BGP OPEN message: // BGP version, AS number, BGP ID // assertThat(peerFrameDecoder.remoteBgpVersion, is(BgpConstants.BGP_VERSION)); assertThat(peerFrameDecoder.remoteAs, is(TestBgpPeerChannelHandler.PEER_AS)); assertThat(peerFrameDecoder.remoteBgpIdentifier, is(IP_LOOPBACK_ID)); // // Test that a BgpSession instance has been created // assertThat(bgpSessionManager.getMyBgpId(), is(IP_LOOPBACK_ID)); assertThat(bgpSessionManager.getBgpSessions(), hasSize(1)); BgpSession bgpSession = bgpSessionManager.getBgpSessions().iterator().next(); assertThat(bgpSession, notNullValue()); long sessionAs = TestUtils.getField(bgpSession, "localAs"); assertThat(sessionAs, 
is(TestBgpPeerChannelHandler.PEER_AS)); } /** * Tests that the BGP UPDATE messages have been received and processed. */ @Test public void testProcessedBgpUpdateMessages() throws InterruptedException { BgpSession bgpSession; Ip4Address nextHopRouter; BgpRouteEntry bgpRouteEntry; ChannelBuffer message; Collection<BgpRouteEntry> bgpRibIn; Collection<BgpRouteEntry> bgpRoutes; // Initiate the connection peerBootstrap.connect(connectToSocket); // Wait until the OPEN message is received peerFrameDecoder.receivedOpenMessageLatch.await(2000, TimeUnit.MILLISECONDS); // Wait until the KEEPALIVE message is received peerFrameDecoder.receivedKeepaliveMessageLatch.await(2000, TimeUnit.MILLISECONDS); // Get the BGP Session handler bgpSession = bgpSessionManager.getBgpSessions().iterator().next(); // Prepare routes to add/delete nextHopRouter = Ip4Address.valueOf("10.20.30.40"); Collection<Ip4Prefix> addedRoutes = new LinkedList<>(); Collection<Ip4Prefix> withdrawnRoutes = new LinkedList<>(); addedRoutes.add(Ip4Prefix.valueOf("0.0.0.0/0")); addedRoutes.add(Ip4Prefix.valueOf("20.0.0.0/8")); addedRoutes.add(Ip4Prefix.valueOf("30.0.0.0/16")); addedRoutes.add(Ip4Prefix.valueOf("40.0.0.0/24")); addedRoutes.add(Ip4Prefix.valueOf("50.0.0.0/32")); withdrawnRoutes.add(Ip4Prefix.valueOf("60.0.0.0/8")); withdrawnRoutes.add(Ip4Prefix.valueOf("70.0.0.0/16")); withdrawnRoutes.add(Ip4Prefix.valueOf("80.0.0.0/24")); withdrawnRoutes.add(Ip4Prefix.valueOf("90.0.0.0/32")); // Write the routes message = peerChannelHandler.prepareBgpUpdate(nextHopRouter, addedRoutes, withdrawnRoutes); peerChannelHandler.savedCtx.getChannel().write(message); // Check that the routes have been received, processed and stored bgpRibIn = waitForBgpRibIn(bgpSession, 5); assertThat(bgpRibIn, hasSize(5)); bgpRoutes = waitForBgpRoutes(5); assertThat(bgpRoutes, hasSize(5)); // Setup the AS Path ArrayList<BgpRouteEntry.PathSegment> pathSegments = new ArrayList<>(); byte pathSegmentType1 = (byte) 
BgpConstants.Update.AsPath.AS_SEQUENCE; ArrayList<Long> segmentAsNumbers1 = new ArrayList<>(); segmentAsNumbers1.add((long) 65010); segmentAsNumbers1.add((long) 65020); segmentAsNumbers1.add((long) 65030); BgpRouteEntry.PathSegment pathSegment1 = new BgpRouteEntry.PathSegment(pathSegmentType1, segmentAsNumbers1); pathSegments.add(pathSegment1); // byte pathSegmentType2 = (byte) BgpConstants.Update.AsPath.AS_SET; ArrayList<Long> segmentAsNumbers2 = new ArrayList<>(); segmentAsNumbers2.add((long) 65041); segmentAsNumbers2.add((long) 65042); segmentAsNumbers2.add((long) 65043); BgpRouteEntry.PathSegment pathSegment2 = new BgpRouteEntry.PathSegment(pathSegmentType2, segmentAsNumbers2); pathSegments.add(pathSegment2); // BgpRouteEntry.AsPath asPath = new BgpRouteEntry.AsPath(pathSegments); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("0.0.0.0/0"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("20.0.0.0/8"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("30.0.0.0/16"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("40.0.0.0/24"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("50.0.0.0/32"), nextHopRouter, (byte) 
BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // Delete some routes addedRoutes = new LinkedList<>(); withdrawnRoutes = new LinkedList<>(); withdrawnRoutes.add(Ip4Prefix.valueOf("0.0.0.0/0")); withdrawnRoutes.add(Ip4Prefix.valueOf("50.0.0.0/32")); // Write the routes message = peerChannelHandler.prepareBgpUpdate(nextHopRouter, addedRoutes, withdrawnRoutes); peerChannelHandler.savedCtx.getChannel().write(message); // Check that the routes have been received, processed and stored bgpRibIn = waitForBgpRibIn(bgpSession, 3); assertThat(bgpRibIn, hasSize(3)); bgpRoutes = waitForBgpRoutes(3); assertThat(bgpRoutes, hasSize(3)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("20.0.0.0/8"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("30.0.0.0/16"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // bgpRouteEntry = new BgpRouteEntry(bgpSession, Ip4Prefix.valueOf("40.0.0.0/24"), nextHopRouter, (byte) BgpConstants.Update.Origin.IGP, asPath, DEFAULT_LOCAL_PREF); bgpRouteEntry.setMultiExitDisc(DEFAULT_MULTI_EXIT_DISC); assertThat(bgpRibIn, hasItem(bgpRouteEntry)); // Close the channel and test there are no routes peerChannelHandler.closeChannel(); bgpRoutes = waitForBgpRoutes(0); assertThat(bgpRoutes, hasSize(0)); } }
SDN-IP unit test fix: Increased the timeout waiting for a message to be received from 2s to 5s. Eventually, this should address some rare test failures. Change-Id: Ifbf024023580ac93f10e1b7fb1b1f5dd2fe361f9
apps/sdnip/src/test/java/org/onlab/onos/sdnip/bgp/BgpSessionManagerTest.java
SDN-IP unit test fix: Increased the timeout waiting for a message to be received from 2s to 5s. Eventually, this should address some rare test failures.
<ide><path>pps/sdnip/src/test/java/org/onlab/onos/sdnip/bgp/BgpSessionManagerTest.java <ide> private static final long DEFAULT_LOCAL_PREF = 10; <ide> private static final long DEFAULT_MULTI_EXIT_DISC = 20; <ide> <add> // Timeout waiting for a message to be received <add> private static final int MESSAGE_TIMEOUT_MS = 5000; // 5s <add> <ide> // The BGP Session Manager to test <ide> private BgpSessionManager bgpSessionManager; <ide> <ide> peerBootstrap.connect(connectToSocket); <ide> <ide> // Wait until the OPEN message is received <del> peerFrameDecoder.receivedOpenMessageLatch.await(2000, <add> peerFrameDecoder.receivedOpenMessageLatch.await(MESSAGE_TIMEOUT_MS, <ide> TimeUnit.MILLISECONDS); <ide> // Wait until the KEEPALIVE message is received <del> peerFrameDecoder.receivedKeepaliveMessageLatch.await(2000, <add> peerFrameDecoder.receivedKeepaliveMessageLatch.await(MESSAGE_TIMEOUT_MS, <ide> TimeUnit.MILLISECONDS); <ide> <ide> // <ide> peerBootstrap.connect(connectToSocket); <ide> <ide> // Wait until the OPEN message is received <del> peerFrameDecoder.receivedOpenMessageLatch.await(2000, <add> peerFrameDecoder.receivedOpenMessageLatch.await(MESSAGE_TIMEOUT_MS, <ide> TimeUnit.MILLISECONDS); <ide> // Wait until the KEEPALIVE message is received <del> peerFrameDecoder.receivedKeepaliveMessageLatch.await(2000, <add> peerFrameDecoder.receivedKeepaliveMessageLatch.await(MESSAGE_TIMEOUT_MS, <ide> TimeUnit.MILLISECONDS); <ide> <ide> // Get the BGP Session handler
Java
mit
6acc63f23c4a52ce69ffef3fa928bff73af0c72f
0
xxyy/xyc
/* * Copyright (c) 2013 - 2015 xxyy (Philipp Nowak; [email protected]). All rights reserved. * * Any usage, including, but not limited to, compiling, running, redistributing, printing, * copying and reverse-engineering is strictly prohibited without explicit written permission * from the original author and may result in legal steps being taken. * * See the included LICENSE file (core/src/main/resources) or email [email protected] for details. */ package li.l1t.common.sql; import com.google.common.base.Preconditions; import li.l1t.common.util.Closer; import li.l1t.common.util.TextOutputHelper; import org.apache.commons.lang.Validate; import org.jetbrains.annotations.Nullable; import javax.annotation.Nonnull; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.Collection; import java.util.function.Function; import java.util.logging.Level; import java.util.logging.Logger; /** * A class providing methods to connect to SQL databases. Requires sql driver to be in CLASSPATH. * <p><b>Note:</b> Use of this class in new code is discouraged due to severe design flaws. Prefer * {@link li.l1t.common.sql.sane.SaneSql} implementations for a well-defined API and concise method * names as well as interfaces instead of concrete classes. Due to its widespread use all over * multiple code bases, it has not been deprecated yet - the non-deprecated methods still work * fine.</p> * * @author xxyy98 * @since forever */ public class SafeSql implements AutoCloseable, PreparedStatementFactory { private static final Logger LOGGER = Logger.getLogger(SafeSql.class.getName()); /** * A DEBUG switch to print every single query made by ANY {@link SafeSql} to {@link * System#out}. 
* * @deprecated bad architecture */ @Deprecated public static boolean debug = false; public final String dbName; /** * A logger to print errors to * * @deprecated internal field that shouldn't have been exposed */ @Deprecated public Logger errLogger = null; /** * Connection maintained by this SafeSql. */ private Connection currentConnection = null; /** * {@link SqlConnectable} providing connection data for this {@link SafeSql}. */ private SqlConnectable authDataProvider; /** * Constructs a new instance. * * @param pl {@link SqlConnectable} providing login data * @throws IllegalArgumentException If plug is {@code null}. * @deprecated use {@link li.l1t.common.sql.sane.SaneSql} instead ({@link SafeSql more info}) */ @Deprecated public SafeSql(SqlConnectable pl) { Validate.notNull(pl); this.authDataProvider = pl; this.dbName = authDataProvider.getSqlDb(); } public static Logger getLogger() { return LOGGER; } /** * Convenience method. Tries to close an {@link AutoCloseable}. If it could not be closed, logs * the encountered exception. * * @param closeable What to close * @return {@code false} if an Exception occurred while closing {@code closeable}, {@code true} * otherwise. * @deprecated Use {@link li.l1t.common.util.Closer#close(AutoCloseable)} */ @Deprecated public static boolean tryClose(AutoCloseable closeable) { if (closeable == null) { return true; } try { closeable.close(); } catch (Exception exc) { Logger.getLogger(SafeSql.class.getName()).log(Level.WARNING, "Could not close something: " + closeable, exc); return false; } return true; } /** * WARNING UNSAFE * * @param query QUERY to execute * @return ResultSet * @deprecated This does not provide the created {@link java.sql.PreparedStatement} so there is * no way to close it. That may lead to severe memory leaks. Use {@link * #executeQueryWithResult(String, Object...)} instead. 
*/ @Deprecated public ResultSet executeQuery(String query) { try { PreparedStatement stmnt = this.getAnyConnection().prepareStatement(query); return stmnt.executeQuery(); } catch (SQLException e) { this.formatAndPrintException(e, "§cException while trying to execute Query: '" + query + "'"); } return null; } /** * This executes a normal update statement in this object's connection. * * @param query UPDATE to be executed * @return Whether the operation succeeded * @see #executeUpdateWithResult(String, Object...) */ public boolean executeUpdate(String query) { try (PreparedStatement stmnt = this.getAnyConnection().prepareStatement(query)) { stmnt.executeUpdate(); return true; } catch (SQLException e) { this.formatAndPrintException(e, "§cException while trying to execute update: '" + query + "'"); } return false; } /** * formats an exception, prints a line before it, then prints (to Ops &amp; console) &amp; logs * it. * * @param e Exception to use * @param firstLine A line describing the error, normally class &amp; method name - more * efficient than getting the caller * @deprecated this is not the job of this class */ @Deprecated public void formatAndPrintException(SQLException e, String firstLine) { System.out.println(firstLine); System.out.println("§4SQL Error " + e.getErrorCode() + ": " + e.getLocalizedMessage()); if (this.errLogger != null) { System.out.printf("%s\nSQL ERROR: %s: %s", firstLine, e.getErrorCode(), e.getLocalizedMessage()); } e.printStackTrace(); } /** * Sets up a connection, regardless if it's already established or not. Note that this does * <b>NOT</b> set this object's connection field! 
* * @return The created {@link java.sql.Connection} * @deprecated internal method that shouldn't have been exposed */ @Deprecated public Connection makeConnection() { Connection c = null; try { String sqlHost = SqlConnectables.getHostString(this.authDataProvider); c = DriverManager.getConnection(sqlHost, this.authDataProvider.getSqlUser(), this.authDataProvider.getSqlPwd()); if (c == null || !c.isValid(5)) { // CommandHelper.sendMessageToOpsAndConsole("§4§l[SEVERE] Could not establish database connection.");// Everybody panic. TextOutputHelper.printAndOrLog("[XYC] Connection to " + sqlHost + " failed.", this.errLogger, Level.SEVERE); } else { DatabaseMetaData meta = c.getMetaData(); TextOutputHelper.printAndOrLog("[XYC] Connected to: " + meta.getURL(), this.errLogger, Level.INFO); } } catch (SQLException e) { this.formatAndPrintException(e, "§cException while trying to establish connection!"); tryClose(c); } return c; } public Connection getAnyConnection() { try { if (this.currentConnection == null || this.currentConnection.isClosed()) { this.currentConnection = makeConnection(); } } catch (SQLException e) { throw new IllegalStateException(e); } Validate.notNull(currentConnection, "Could not make connection!"); return getCurrentConnection(); } /** * @return a newly made connection. * @see #makeConnection() * @deprecated Ambiguous name. Kept for compatibility with previous code. */ @Deprecated public Connection getConnection() { return makeConnection(); } /** * Safely prepares a statement. Remember to close it afterwards. Insert values by using '?'. <p> * Example: {@code PreparedStatement stmt = null; try{ stmt = safesql.prepareStatement("UPDATE * "+safesql.dbName+".users SET status = 0 AND some_string = ? WHERE user_id = ?"); if(stmt == * null) panic(); stmt.setString(1,"THAT'S SAFE");//just look at what you can't do now! 
* stmt.setInt(2,42); stmt.executeUpdate(); }finally{ try{ if(stmt != null){ stmt.close(); } * }catch{ logAndThenPanic(); } } }</p> * * @param query Query to prepare (may contain '?') * @return {@link PreparedStatement}; not executed OR null at failure * @deprecated internal method that shouldn't have been exposed */ @Nonnull @Deprecated public PreparedStatement prepareStatement(@Nonnull String query) throws SQLException { PreparedStatement stmt = this.getAnyConnection().prepareStatement(query); Validate.notNull(stmt); return stmt; } /** * Tries to close any open {@link Connection} managed by this object. */ public void preReload() { Closer.close(this); } /** * SAFE * * @param query Query to execute * @param ints ints to insert using {@link PreparedStatement#setInt(int, int)} * @return ResultSet * @see #executeQuery(String) * @deprecated This does not provide the created {@link java.sql.PreparedStatement} so there is * no way to close it. That may lead to severe memory leaks. Use {@link * #executeQueryWithResult(String, Object...)} instead. */ @Deprecated public ResultSet safelyExecuteQuery(String query, int... ints) { try { PreparedStatement stmnt = this.prepareStatement(query); Validate.notNull(stmnt); int i = 1; for (int nr : ints) { stmnt.setInt(i, nr); i++; } return stmnt.executeQuery(); } catch (SQLException e) { this.formatAndPrintException(e, "§cException while trying to safely execute a query (int): '" + query + "'"); } return null; } /** * SAFE * * @param query Query to execute * @param strings Strings to insert using {@link PreparedStatement#setString(int, String)} * @return ResultSet * @see #executeQuery(String) * @see #executeQueryWithResult(String, Object...) * @deprecated This does not provide the created {@link java.sql.PreparedStatement} so there is * no way to close it. That may lead to severe memory leaks. Use {@link * #executeQueryWithResult(String, Object...)} instead. */ @Deprecated public ResultSet safelyExecuteQuery(String query, String... 
strings) { try { PreparedStatement stmnt = this.prepareStatement(query); Validate.notNull(stmnt); int i = 1; for (String str : strings) { stmnt.setString(i, str); i++; } return stmnt.executeQuery(); } catch (SQLException e) { this.formatAndPrintException(e, "§cException while trying to safely execute a query (String): '" + query + "'"); } return null; } /** * Executes a query in the database by creating a {@link java.sql.PreparedStatement} and filling * with the the given objects. * * @param query Query to execute (? is filled out with the corresponding {@code objects} * value) * @param objects Objects to fill the statement with * @return A QueryResult representing the executed query ({@link QueryResult#getUpdateReturn()} * will be {@code -1}). Remember to always close this! * @throws SQLException When an error occurs while creating the statement, executing the * statement or filling in the values. */ public QueryResult executeQueryWithResult(String query, Object... objects) throws SQLException { PreparedStatement stmt = this.prepareStatement(query); fillStatement(stmt, objects); return new QueryResult(stmt, stmt.executeQuery()); } /** * Executes an update in the database by creating a {@link java.sql.PreparedStatement} and * filling with the the given objects. * * @param query Update to execute (? is filled out with the corresponding {@code objects} * value) * @param objects Objects to fill the statement with * @return A QueryResult representing the executed update ({@link QueryResult#getResultSet()} * will be {@code null}). Remember to always close this! * @throws SQLException When an error occurs while creating the statement, executing the * statement or filling in the values. * @deprecated Uses QueryResult instead of UpdateResult. Use {@link #executeUpdateWithGenKeys(String, * Object...)}. */ @Deprecated public QueryResult executeUpdateWithResult(String query, Object... 
objects) throws SQLException { PreparedStatement stmt = this.prepareStatement(query); fillStatement(stmt, objects); return new QueryResult(stmt, stmt.executeUpdate()); } /** * Executes an update in the database by creating a {@link java.sql.PreparedStatement} and * filling with the the given objects. * * @param query Update to execute (? is filled out with the corresponding {@code objects} * value) * @param arguments Objects to fill the statement with * @return A QueryResult representing the executed update ({@link QueryResult#getResultSet()} * will be {@code null}). Remember to always close this! * @throws SQLException When an error occurs while creating the statement, executing the * statement or filling in the values. */ @Nonnull public UpdateResult executeUpdateWithGenKeys(@Nonnull String query, Object... arguments) throws SQLException { PreparedStatement stmt = this.getAnyConnection().prepareStatement(query, Statement.RETURN_GENERATED_KEYS); Validate.notNull(stmt); fillStatement(stmt, arguments); return new UpdateResult(stmt.executeUpdate(), stmt.getGeneratedKeys()); //TODO: this does not close the statement, help } @Nullable @Deprecated //internal method public PreparedStatement fillStatement(@Nullable PreparedStatement stmt, @Nonnull Object[] objects) throws SQLException { if (stmt == null) { return null; } for (int i = 0; i < objects.length; i++) { if (objects[i] == null) { stmt.setNull(i + 1, Types.OTHER); } else { stmt.setObject(i + 1, objects[i]); } } return stmt; } /** * SAFE * * @param query Update to execute * @param objects Objects to insert using {@link PreparedStatement#setString(int, String)} * @return int (see: {@link PreparedStatement#executeUpdate()} * @see #executeUpdate(String) */ public int safelyExecuteUpdate(String query, Object... 
objects) { try (PreparedStatement stmnt = this.prepareStatement(query)) { this.fillStatement(stmnt, objects); return stmnt.executeUpdate(); } catch (SQLException e) { this.formatAndPrintException(e, "§cException while trying to safely execute an update (String)!"); } return -1; } /** * Executes a set of updates for a given object type in a batch. Note that this can only operate * on same objects and same SQL statements. * * @param sql the SQL update or insert statement to fill with the parameters for * each batch element * @param data a collection of objects representing the data to be written to the * database * @param parameterMapper a mapper function mapping an object to the {@code sql} parameters * representing it, in declaration order. * @param <T> the type of object to be written to the database * @return an integer array, see {@link PreparedStatement#executeBatch()} * @throws SQLException if an error occurs while executing the batch */ public <T> int[] executeBatchUpdate(String sql, Collection<T> data, Function<T, Object[]> parameterMapper) throws SQLException { Preconditions.checkNotNull(data, "data"); Preconditions.checkNotNull(sql, "sql"); Preconditions.checkNotNull(parameterMapper, "parameterMapper"); Connection connection = getAnyConnection(); int[] count; try (PreparedStatement statement = Preconditions.checkNotNull(connection.prepareStatement(sql), "statement")) { connection.setAutoCommit(false); for (T t : data) { Object[] parameters = parameterMapper.apply(t); fillStatement(statement, parameters); statement.addBatch(); } count = statement.executeBatch(); } finally { connection.commit(); connection.setAutoCommit(true); } return count; } @Override public String toString() { String conStr; try { conStr = " [con:" + ((this.currentConnection.isClosed()) ? 
"" : "!") + "closed" + " @ " + this.currentConnection.getMetaData().getURL() + "]"; } catch (Exception e) { conStr = " [con: ~" + e.getClass().getName() + "~]"; } return getClass().getName() + "->" + this.authDataProvider.getSqlUser() + "@" + this.authDataProvider.getSqlHost() + "|" + this.authDataProvider.getSqlDb() + conStr; } @Override public void close() throws Exception { if (currentConnection != null) { currentConnection.close(); currentConnection = null; } } public Connection getCurrentConnection() { return this.currentConnection; } @Deprecated //internal state public void setCurrentConnection(Connection currentConnection) { this.currentConnection = currentConnection; } }
core/src/main/java/li/l1t/common/sql/SafeSql.java
/* * Copyright (c) 2013 - 2015 xxyy (Philipp Nowak; [email protected]). All rights reserved. * * Any usage, including, but not limited to, compiling, running, redistributing, printing, * copying and reverse-engineering is strictly prohibited without explicit written permission * from the original author and may result in legal steps being taken. * * See the included LICENSE file (core/src/main/resources) or email [email protected] for details. */ package li.l1t.common.sql; import com.google.common.base.Preconditions; import li.l1t.common.util.TextOutputHelper; import org.apache.commons.lang.Validate; import org.jetbrains.annotations.Nullable; import javax.annotation.Nonnull; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.Collection; import java.util.function.Function; import java.util.logging.Level; import java.util.logging.Logger; /** * A class providing methods to connect to SQL databases. Requires sql driver to be in CLASSPATH. * * @author xxyy98 */ public class SafeSql implements AutoCloseable, PreparedStatementFactory { private static final Logger LOGGER = Logger.getLogger(SafeSql.class.getName()); /** * A DEBUG switch to print every single query made by ANY {@link SafeSql} to {@link System#out}. */ public static boolean debug = false; public final String dbName; /** * A logger to print errors to */ public Logger errLogger = null; /** * Connection maintained by this SafeSql. */ private Connection currentConnection = null; /** * {@link SqlConnectable} providing connection data for this {@link SafeSql}. */ private SqlConnectable authDataProvider; /** * Constructs a new instance. * * @param pl {@link SqlConnectable} providing login data * @throws IllegalArgumentException If plug is {@code null}. 
*/ public SafeSql(SqlConnectable pl) { Validate.notNull(pl); this.authDataProvider = pl; this.dbName = authDataProvider.getSqlDb(); } public static Logger getLogger() { return LOGGER; } /** * Convenience method. Tries to close an {@link AutoCloseable}. If it could not be closed, logs the encountered exception. * * @param closeable What to close * @return {@code false} if an Exception occurred while closing {@code closeable}, {@code true} otherwise. */ public static boolean tryClose(AutoCloseable closeable) { if (closeable == null) { return true; } try { closeable.close(); } catch (Exception exc) { Logger.getLogger(SafeSql.class.getName()).log(Level.WARNING, "Could not close something: " + closeable, exc); return false; } return true; } /** * WARNING UNSAFE * * @param query QUERY to execute * @return ResultSet * @deprecated This does not provide the created {@link java.sql.PreparedStatement} so there is no way to close it. That may lead to severe memory leaks. Use {@link #executeQueryWithResult(String, Object...)} instead. */ @Deprecated public ResultSet executeQuery(String query) { try { PreparedStatement stmnt = this.getAnyConnection().prepareStatement(query); return stmnt.executeQuery(); } catch (SQLException e) { this.formatAndPrintException(e, "§cException while trying to execute Query: '" + query + "'"); } return null; } /** * This executes a normal update statement in this object's connection. * * @param query UPDATE to be executed * @return Whether the operation succeeded * @see #executeUpdateWithResult(String, Object...) */ public boolean executeUpdate(String query) { try (PreparedStatement stmnt = this.getAnyConnection().prepareStatement(query)) { stmnt.executeUpdate(); return true; } catch (SQLException e) { this.formatAndPrintException(e, "§cException while trying to execute update: '" + query + "'"); } return false; } /** * formats an exception, prints a line before it, then prints (to Ops &amp; console) &amp; logs it. 
* * @param e Exception to use * @param firstLine A line describing the error, normally class &amp; method name - more efficient than getting the caller */ public void formatAndPrintException(SQLException e, String firstLine) { System.out.println(firstLine); System.out.println("§4SQL Error " + e.getErrorCode() + ": " + e.getLocalizedMessage()); if (this.errLogger != null){ System.out.printf("%s\nSQL ERROR: %s: %s", firstLine, e.getErrorCode(), e.getLocalizedMessage()); } e.printStackTrace(); } /** * Sets up a connection, regardless if it's already established or not. * Note that this does <b>NOT</b> set this object's connection field! * * @return The created {@link java.sql.Connection} */ public Connection makeConnection() { Connection c = null; try { String sqlHost = SqlConnectables.getHostString(this.authDataProvider); c = DriverManager.getConnection(sqlHost, this.authDataProvider.getSqlUser(), this.authDataProvider.getSqlPwd()); if (c == null || !c.isValid(5)){ // CommandHelper.sendMessageToOpsAndConsole("§4§l[SEVERE] Could not establish database connection.");// Everybody panic. TextOutputHelper.printAndOrLog("[XYC] Connection to " + sqlHost + " failed.", this.errLogger, Level.SEVERE); } else { DatabaseMetaData meta = c.getMetaData(); TextOutputHelper.printAndOrLog("[XYC] Connected to: " + meta.getURL(), this.errLogger, Level.INFO); } } catch (SQLException e) { this.formatAndPrintException(e, "§cException while trying to establish connection!"); tryClose(c); } return c; } public Connection getAnyConnection() { try { if (this.currentConnection == null || this.currentConnection.isClosed()){ this.currentConnection = makeConnection(); } } catch (SQLException e) { throw new IllegalStateException(e); } Validate.notNull(currentConnection, "Could not make connection!"); return getCurrentConnection(); } /** * @return a newly made connection. * @see #makeConnection() * @deprecated Ambiguous name. Kept for compatibility with previous code. 
*/ @Deprecated public Connection getConnection() { return makeConnection(); } /** * Safely prepares a statement. Remember to close it afterwards. Insert values by using '?'. * <p> * Example: * {@code * PreparedStatement stmt = null; * try{ * stmt = safesql.prepareStatement("UPDATE "+safesql.dbName+".users SET status = 0 AND some_string = ? WHERE user_id = ?"); * if(stmt == null) panic(); * stmt.setString(1,"THAT'S SAFE");//just look at what you can't do now! * stmt.setInt(2,42); * stmt.executeUpdate(); * }finally{ * try{ if(stmt != null){ stmt.close(); } }catch{ logAndThenPanic(); } } * }</p> * * @param query Query to prepare (may contain '?') * @return {@link PreparedStatement}; not executed OR null at failure */ @Nonnull public PreparedStatement prepareStatement(@Nonnull String query) throws SQLException { PreparedStatement stmt = this.getAnyConnection().prepareStatement(query); Validate.notNull(stmt); return stmt; } /** * Tries to close any open {@link Connection} managed by this object. */ public void preReload() { tryClose(this); } /** * SAFE * * @param query Query to execute * @param ints ints to insert using {@link PreparedStatement#setInt(int, int)} * @return ResultSet * @see #executeQuery(String) * @deprecated This does not provide the created {@link java.sql.PreparedStatement} so there is no way to close it. That may lead to severe memory leaks. Use {@link #executeQueryWithResult(String, Object...)} instead. */ @Deprecated public ResultSet safelyExecuteQuery(String query, int... 
ints) { try { PreparedStatement stmnt = this.prepareStatement(query); Validate.notNull(stmnt); int i = 1; for (int nr : ints) { stmnt.setInt(i, nr); i++; } return stmnt.executeQuery(); } catch (SQLException e) { this.formatAndPrintException(e, "§cException while trying to safely execute a query (int): '" + query + "'"); } return null; } /** * SAFE * * @param query Query to execute * @param strings Strings to insert using {@link PreparedStatement#setString(int, String)} * @return ResultSet * @see #executeQuery(String) * @see #executeQueryWithResult(String, Object...) * @deprecated This does not provide the created {@link java.sql.PreparedStatement} so there is no way to close it. That may lead to severe memory leaks. Use {@link #executeQueryWithResult(String, Object...)} instead. */ @Deprecated public ResultSet safelyExecuteQuery(String query, String... strings) { try { PreparedStatement stmnt = this.prepareStatement(query); Validate.notNull(stmnt); int i = 1; for (String str : strings) { stmnt.setString(i, str); i++; } return stmnt.executeQuery(); } catch (SQLException e) { this.formatAndPrintException(e, "§cException while trying to safely execute a query (String): '" + query + "'"); } return null; } /** * Executes a query in the database by creating a {@link java.sql.PreparedStatement} and filling with the the given objects. * * @param query Query to execute (? is filled out with the corresponding {@code objects} value) * @param objects Objects to fill the statement with * @return A QueryResult representing the executed query ({@link QueryResult#getUpdateReturn()} will be {@code -1}). Remember to always close this! * @throws SQLException When an error occurs while creating the statement, executing the statement or filling in the values. */ public QueryResult executeQueryWithResult(String query, Object... 
objects) throws SQLException { PreparedStatement stmt = this.prepareStatement(query); fillStatement(stmt, objects); return new QueryResult(stmt, stmt.executeQuery()); } /** * Executes an update in the database by creating a {@link java.sql.PreparedStatement} and filling with the the given objects. * * @param query Update to execute (? is filled out with the corresponding {@code objects} value) * @param objects Objects to fill the statement with * @return A QueryResult representing the executed update ({@link QueryResult#getResultSet()} will be {@code null}). Remember to always close this! * @throws SQLException When an error occurs while creating the statement, executing the statement or filling in the values. * @deprecated Uses QueryResult instead of UpdateResult. Use {@link #executeUpdateWithGenKeys(String, Object...)}. */ @Deprecated public QueryResult executeUpdateWithResult(String query, Object... objects) throws SQLException { PreparedStatement stmt = this.prepareStatement(query); fillStatement(stmt, objects); return new QueryResult(stmt, stmt.executeUpdate()); } /** * Executes an update in the database by creating a {@link java.sql.PreparedStatement} and filling with the the given objects. * * @param query Update to execute (? is filled out with the corresponding {@code objects} value) * @param arguments Objects to fill the statement with * @return A QueryResult representing the executed update ({@link QueryResult#getResultSet()} will be {@code null}). Remember to always close this! * @throws SQLException When an error occurs while creating the statement, executing the statement or filling in the values. */ @Nonnull public UpdateResult executeUpdateWithGenKeys(@Nonnull String query, Object... 
arguments) throws SQLException { PreparedStatement stmt = this.getAnyConnection().prepareStatement(query, Statement.RETURN_GENERATED_KEYS); Validate.notNull(stmt); fillStatement(stmt, arguments); return new UpdateResult(stmt.executeUpdate(), stmt.getGeneratedKeys()); //TODO: this does not close the statement, help } @Nullable public PreparedStatement fillStatement(@Nullable PreparedStatement stmt, @Nonnull Object[] objects) throws SQLException { if (stmt == null){ return null; } for (int i = 0; i < objects.length; i++) { if (objects[i] == null){ stmt.setNull(i + 1, Types.OTHER); } else { stmt.setObject(i + 1, objects[i]); } } return stmt; } /** * SAFE * * @param query Update to execute * @param objects Objects to insert using {@link PreparedStatement#setString(int, String)} * @return int (see: {@link PreparedStatement#executeUpdate()} * @see #executeUpdate(String) */ public int safelyExecuteUpdate(String query, Object... objects) { try (PreparedStatement stmnt = this.prepareStatement(query)) { this.fillStatement(stmnt, objects); return stmnt.executeUpdate(); } catch (SQLException e) { this.formatAndPrintException(e, "§cException while trying to safely execute an update (String)!"); } return -1; } /** * Executes a set of updates for a given object type in a batch. Note that this can only operate on same objects and * same SQL statements. * * @param sql the SQL update or insert statement to fill with the parameters for each batch element * @param data a collection of objects representing the data to be written to the database * @param parameterMapper a mapper function mapping an object to the {@code sql} parameters representing it, in * declaration order. 
* @param <T> the type of object to be written to the database * @return an integer array, see {@link PreparedStatement#executeBatch()} * @throws SQLException if an error occurs while executing the batch */ public <T> int[] executeBatchUpdate(String sql, Collection<T> data, Function<T, Object[]> parameterMapper) throws SQLException { Preconditions.checkNotNull(data, "data"); Preconditions.checkNotNull(sql, "sql"); Preconditions.checkNotNull(parameterMapper, "parameterMapper"); Connection connection = getAnyConnection(); int[] count; try (PreparedStatement statement = Preconditions.checkNotNull(connection.prepareStatement(sql), "statement")) { connection.setAutoCommit(false); for (T t : data) { Object[] parameters = parameterMapper.apply(t); fillStatement(statement, parameters); statement.addBatch(); } count = statement.executeBatch(); } finally { connection.commit(); connection.setAutoCommit(true); } return count; } @Override public String toString() { String conStr; try { conStr = " [con:" + ((this.currentConnection.isClosed()) ? "" : "!") + "closed" + " @ " + this.currentConnection.getMetaData().getURL() + "]"; } catch (Exception e) { conStr = " [con: ~" + e.getClass().getName() + "~]"; } return getClass().getName() + "->" + this.authDataProvider.getSqlUser() + "@" + this.authDataProvider.getSqlHost() + "|" + this.authDataProvider.getSqlDb() + conStr; } @Override public void close() throws Exception { if (currentConnection != null){ currentConnection.close(); currentConnection = null; } } public Connection getCurrentConnection() { return this.currentConnection; } public void setCurrentConnection(Connection currentConnection) { this.currentConnection = currentConnection; } }
Add deprecation notices to SafeSql
core/src/main/java/li/l1t/common/sql/SafeSql.java
Add deprecation notices to SafeSql
<ide><path>ore/src/main/java/li/l1t/common/sql/SafeSql.java <ide> package li.l1t.common.sql; <ide> <ide> import com.google.common.base.Preconditions; <add>import li.l1t.common.util.Closer; <ide> import li.l1t.common.util.TextOutputHelper; <ide> import org.apache.commons.lang.Validate; <ide> import org.jetbrains.annotations.Nullable; <ide> <ide> /** <ide> * A class providing methods to connect to SQL databases. Requires sql driver to be in CLASSPATH. <add> * <p><b>Note:</b> Use of this class in new code is discouraged due to severe design flaws. Prefer <add> * {@link li.l1t.common.sql.sane.SaneSql} implementations for a well-defined API and concise method <add> * names as well as interfaces instead of concrete classes. Due to its widespread use all over <add> * multiple code bases, it has not been deprecated yet - the non-deprecated methods still work <add> * fine.</p> <ide> * <ide> * @author xxyy98 <add> * @since forever <ide> */ <ide> public class SafeSql implements AutoCloseable, PreparedStatementFactory { <ide> private static final Logger LOGGER = Logger.getLogger(SafeSql.class.getName()); <ide> /** <del> * A DEBUG switch to print every single query made by ANY {@link SafeSql} to {@link System#out}. <del> */ <add> * A DEBUG switch to print every single query made by ANY {@link SafeSql} to {@link <add> * System#out}. <add> * <add> * @deprecated bad architecture <add> */ <add> @Deprecated <ide> public static boolean debug = false; <ide> public final String dbName; <ide> /** <ide> * A logger to print errors to <del> */ <add> * <add> * @deprecated internal field that shouldn't have been exposed <add> */ <add> @Deprecated <ide> public Logger errLogger = null; <ide> /** <ide> * Connection maintained by this SafeSql. <ide> * <ide> * @param pl {@link SqlConnectable} providing login data <ide> * @throws IllegalArgumentException If plug is {@code null}. 
<del> */ <add> * @deprecated use {@link li.l1t.common.sql.sane.SaneSql} instead ({@link SafeSql more info}) <add> */ <add> @Deprecated <ide> public SafeSql(SqlConnectable pl) { <ide> Validate.notNull(pl); <ide> this.authDataProvider = pl; <ide> } <ide> <ide> /** <del> * Convenience method. Tries to close an {@link AutoCloseable}. If it could not be closed, logs the encountered exception. <add> * Convenience method. Tries to close an {@link AutoCloseable}. If it could not be closed, logs <add> * the encountered exception. <ide> * <ide> * @param closeable What to close <del> * @return {@code false} if an Exception occurred while closing {@code closeable}, {@code true} otherwise. <del> */ <add> * @return {@code false} if an Exception occurred while closing {@code closeable}, {@code true} <add> * otherwise. <add> * @deprecated Use {@link li.l1t.common.util.Closer#close(AutoCloseable)} <add> */ <add> @Deprecated <ide> public static boolean tryClose(AutoCloseable closeable) { <ide> if (closeable == null) { <ide> return true; <ide> * <ide> * @param query QUERY to execute <ide> * @return ResultSet <del> * @deprecated This does not provide the created {@link java.sql.PreparedStatement} so there is no way to close it. That may lead to severe memory leaks. Use {@link #executeQueryWithResult(String, Object...)} instead. <add> * @deprecated This does not provide the created {@link java.sql.PreparedStatement} so there is <add> * no way to close it. That may lead to severe memory leaks. Use {@link <add> * #executeQueryWithResult(String, Object...)} instead. <ide> */ <ide> @Deprecated <ide> public ResultSet executeQuery(String query) { <ide> } <ide> <ide> /** <del> * formats an exception, prints a line before it, then prints (to Ops &amp; console) &amp; logs it. <add> * formats an exception, prints a line before it, then prints (to Ops &amp; console) &amp; logs <add> * it. 
<ide> * <ide> * @param e Exception to use <del> * @param firstLine A line describing the error, normally class &amp; method name - more efficient than getting the caller <del> */ <add> * @param firstLine A line describing the error, normally class &amp; method name - more <add> * efficient than getting the caller <add> * @deprecated this is not the job of this class <add> */ <add> @Deprecated <ide> public void formatAndPrintException(SQLException e, String firstLine) { <ide> System.out.println(firstLine); <ide> System.out.println("§4SQL Error " + e.getErrorCode() + ": " + e.getLocalizedMessage()); <del> if (this.errLogger != null){ <add> if (this.errLogger != null) { <ide> System.out.printf("%s\nSQL ERROR: %s: %s", firstLine, e.getErrorCode(), e.getLocalizedMessage()); <ide> } <ide> e.printStackTrace(); <ide> } <ide> <ide> /** <del> * Sets up a connection, regardless if it's already established or not. <del> * Note that this does <b>NOT</b> set this object's connection field! <add> * Sets up a connection, regardless if it's already established or not. Note that this does <add> * <b>NOT</b> set this object's connection field! <ide> * <ide> * @return The created {@link java.sql.Connection} <del> */ <add> * @deprecated internal method that shouldn't have been exposed <add> */ <add> @Deprecated <ide> public Connection makeConnection() { <ide> Connection c = null; <ide> try { <ide> <ide> c = DriverManager.getConnection(sqlHost, this.authDataProvider.getSqlUser(), this.authDataProvider.getSqlPwd()); <ide> <del> if (c == null || !c.isValid(5)){ <add> if (c == null || !c.isValid(5)) { <ide> // CommandHelper.sendMessageToOpsAndConsole("§4§l[SEVERE] Could not establish database connection.");// Everybody panic. 
<ide> TextOutputHelper.printAndOrLog("[XYC] Connection to " + sqlHost + " failed.", this.errLogger, Level.SEVERE); <ide> } else { <ide> <ide> public Connection getAnyConnection() { <ide> try { <del> if (this.currentConnection == null || this.currentConnection.isClosed()){ <add> if (this.currentConnection == null || this.currentConnection.isClosed()) { <ide> this.currentConnection = makeConnection(); <ide> } <ide> } catch (SQLException e) { <ide> } <ide> <ide> /** <del> * Safely prepares a statement. Remember to close it afterwards. Insert values by using '?'. <del> * <p> <del> * Example: <del> * {@code <del> * PreparedStatement stmt = null; <del> * try{ <del> * stmt = safesql.prepareStatement("UPDATE "+safesql.dbName+".users SET status = 0 AND some_string = ? WHERE user_id = ?"); <del> * if(stmt == null) panic(); <del> * stmt.setString(1,"THAT'S SAFE");//just look at what you can't do now! <del> * stmt.setInt(2,42); <del> * stmt.executeUpdate(); <del> * }finally{ <del> * try{ if(stmt != null){ stmt.close(); } }catch{ logAndThenPanic(); } } <del> * }</p> <add> * Safely prepares a statement. Remember to close it afterwards. Insert values by using '?'. <p> <add> * Example: {@code PreparedStatement stmt = null; try{ stmt = safesql.prepareStatement("UPDATE <add> * "+safesql.dbName+".users SET status = 0 AND some_string = ? WHERE user_id = ?"); if(stmt == <add> * null) panic(); stmt.setString(1,"THAT'S SAFE");//just look at what you can't do now! 
<add> * stmt.setInt(2,42); stmt.executeUpdate(); }finally{ try{ if(stmt != null){ stmt.close(); } <add> * }catch{ logAndThenPanic(); } } }</p> <ide> * <ide> * @param query Query to prepare (may contain '?') <ide> * @return {@link PreparedStatement}; not executed OR null at failure <add> * @deprecated internal method that shouldn't have been exposed <ide> */ <ide> @Nonnull <add> @Deprecated <ide> public PreparedStatement prepareStatement(@Nonnull String query) throws SQLException { <ide> PreparedStatement stmt = this.getAnyConnection().prepareStatement(query); <ide> Validate.notNull(stmt); <ide> * Tries to close any open {@link Connection} managed by this object. <ide> */ <ide> public void preReload() { <del> tryClose(this); <add> Closer.close(this); <ide> } <ide> <ide> /** <ide> * @param ints ints to insert using {@link PreparedStatement#setInt(int, int)} <ide> * @return ResultSet <ide> * @see #executeQuery(String) <del> * @deprecated This does not provide the created {@link java.sql.PreparedStatement} so there is no way to close it. That may lead to severe memory leaks. Use {@link #executeQueryWithResult(String, Object...)} instead. <add> * @deprecated This does not provide the created {@link java.sql.PreparedStatement} so there is <add> * no way to close it. That may lead to severe memory leaks. Use {@link <add> * #executeQueryWithResult(String, Object...)} instead. <ide> */ <ide> @Deprecated <ide> public ResultSet safelyExecuteQuery(String query, int... ints) { <ide> * @return ResultSet <ide> * @see #executeQuery(String) <ide> * @see #executeQueryWithResult(String, Object...) <del> * @deprecated This does not provide the created {@link java.sql.PreparedStatement} so there is no way to close it. That may lead to severe memory leaks. Use {@link #executeQueryWithResult(String, Object...)} instead. <add> * @deprecated This does not provide the created {@link java.sql.PreparedStatement} so there is <add> * no way to close it. That may lead to severe memory leaks. 
Use {@link <add> * #executeQueryWithResult(String, Object...)} instead. <ide> */ <ide> @Deprecated <ide> public ResultSet safelyExecuteQuery(String query, String... strings) { <ide> } <ide> <ide> /** <del> * Executes a query in the database by creating a {@link java.sql.PreparedStatement} and filling with the the given objects. <del> * <del> * @param query Query to execute (? is filled out with the corresponding {@code objects} value) <add> * Executes a query in the database by creating a {@link java.sql.PreparedStatement} and filling <add> * with the the given objects. <add> * <add> * @param query Query to execute (? is filled out with the corresponding {@code objects} <add> * value) <ide> * @param objects Objects to fill the statement with <del> * @return A QueryResult representing the executed query ({@link QueryResult#getUpdateReturn()} will be {@code -1}). Remember to always close this! <del> * @throws SQLException When an error occurs while creating the statement, executing the statement or filling in the values. <add> * @return A QueryResult representing the executed query ({@link QueryResult#getUpdateReturn()} <add> * will be {@code -1}). Remember to always close this! <add> * @throws SQLException When an error occurs while creating the statement, executing the <add> * statement or filling in the values. <ide> */ <ide> public QueryResult executeQueryWithResult(String query, Object... objects) throws SQLException { <ide> PreparedStatement stmt = this.prepareStatement(query); <ide> } <ide> <ide> /** <del> * Executes an update in the database by creating a {@link java.sql.PreparedStatement} and filling with the the given objects. <del> * <del> * @param query Update to execute (? is filled out with the corresponding {@code objects} value) <add> * Executes an update in the database by creating a {@link java.sql.PreparedStatement} and <add> * filling with the the given objects. <add> * <add> * @param query Update to execute (? 
is filled out with the corresponding {@code objects} <add> * value) <ide> * @param objects Objects to fill the statement with <del> * @return A QueryResult representing the executed update ({@link QueryResult#getResultSet()} will be {@code null}). Remember to always close this! <del> * @throws SQLException When an error occurs while creating the statement, executing the statement or filling in the values. <del> * @deprecated Uses QueryResult instead of UpdateResult. Use {@link #executeUpdateWithGenKeys(String, Object...)}. <add> * @return A QueryResult representing the executed update ({@link QueryResult#getResultSet()} <add> * will be {@code null}). Remember to always close this! <add> * @throws SQLException When an error occurs while creating the statement, executing the <add> * statement or filling in the values. <add> * @deprecated Uses QueryResult instead of UpdateResult. Use {@link #executeUpdateWithGenKeys(String, <add> * Object...)}. <ide> */ <ide> @Deprecated <ide> public QueryResult executeUpdateWithResult(String query, Object... objects) throws SQLException { <ide> } <ide> <ide> /** <del> * Executes an update in the database by creating a {@link java.sql.PreparedStatement} and filling with the the given objects. <del> * <del> * @param query Update to execute (? is filled out with the corresponding {@code objects} value) <add> * Executes an update in the database by creating a {@link java.sql.PreparedStatement} and <add> * filling with the the given objects. <add> * <add> * @param query Update to execute (? is filled out with the corresponding {@code objects} <add> * value) <ide> * @param arguments Objects to fill the statement with <del> * @return A QueryResult representing the executed update ({@link QueryResult#getResultSet()} will be {@code null}). Remember to always close this! <del> * @throws SQLException When an error occurs while creating the statement, executing the statement or filling in the values. 
<add> * @return A QueryResult representing the executed update ({@link QueryResult#getResultSet()} <add> * will be {@code null}). Remember to always close this! <add> * @throws SQLException When an error occurs while creating the statement, executing the <add> * statement or filling in the values. <ide> */ <ide> @Nonnull <ide> public UpdateResult executeUpdateWithGenKeys(@Nonnull String query, Object... arguments) throws SQLException { <ide> } <ide> <ide> @Nullable <add> @Deprecated //internal method <ide> public PreparedStatement fillStatement(@Nullable PreparedStatement stmt, @Nonnull Object[] objects) throws SQLException { <del> if (stmt == null){ <add> if (stmt == null) { <ide> return null; <ide> } <ide> <ide> for (int i = 0; i < objects.length; i++) { <del> if (objects[i] == null){ <add> if (objects[i] == null) { <ide> stmt.setNull(i + 1, Types.OTHER); <ide> } else { <ide> stmt.setObject(i + 1, objects[i]); <ide> } <ide> <ide> /** <del> * Executes a set of updates for a given object type in a batch. Note that this can only operate on same objects and <del> * same SQL statements. <del> * <del> * @param sql the SQL update or insert statement to fill with the parameters for each batch element <del> * @param data a collection of objects representing the data to be written to the database <del> * @param parameterMapper a mapper function mapping an object to the {@code sql} parameters representing it, in <del> * declaration order. <add> * Executes a set of updates for a given object type in a batch. Note that this can only operate <add> * on same objects and same SQL statements. <add> * <add> * @param sql the SQL update or insert statement to fill with the parameters for <add> * each batch element <add> * @param data a collection of objects representing the data to be written to the <add> * database <add> * @param parameterMapper a mapper function mapping an object to the {@code sql} parameters <add> * representing it, in declaration order. 
<ide> * @param <T> the type of object to be written to the database <ide> * @return an integer array, see {@link PreparedStatement#executeBatch()} <ide> * @throws SQLException if an error occurs while executing the batch <ide> <ide> @Override <ide> public void close() throws Exception { <del> if (currentConnection != null){ <add> if (currentConnection != null) { <ide> currentConnection.close(); <ide> currentConnection = null; <ide> } <ide> return this.currentConnection; <ide> } <ide> <add> @Deprecated //internal state <ide> public void setCurrentConnection(Connection currentConnection) { <ide> this.currentConnection = currentConnection; <ide> }
JavaScript
mit
4df5bd8e92d8530a5eb01e1a600e087823a52f86
0
mcansh/blog,mcansh/blog,mcansh/blog
import React, { Component, Fragment } from 'react'; import Router from 'next/router'; import NProgress from 'nprogress'; import PropTypes from 'prop-types'; import { ThemeProvider } from 'styled-components'; import Navigation from '../Navigation'; import colors from '../../theme'; import Footer from '../Footer'; import { version, repository } from '../../package.json'; import { initGA, logPageView } from '../../lib/analytics'; import withIntl from './withIntl'; NProgress.configure({ showSpinner: false }); Router.onRouteChangeStart = () => NProgress.start(); Router.onRouteChangeComplete = () => NProgress.done(); Router.onRouteChangeError = () => NProgress.done(); if (global.document) { const info = [ `Version: ${version}`, `You can find the code here: https://github.com/${repository}`, 'Thanks for stopping by ��', ]; // eslint-disable-next-line no-console info.forEach(message => console.log(message)); } class Document extends Component { componentDidMount() { if (process.env.NODE_ENV === 'production' && !window.GA_INITIALIZED) { initGA(); window.GA_INITIALIZED = true; } logPageView(); } render() { const { children } = this.props; return ( <ThemeProvider theme={colors}> <Fragment> <Navigation /> {children} <Footer /> <style jsx global>{` * { box-sizing: border-box; margin: 0; } html { font-size: 10px; } body { font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue', sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol'; font-weight: 400; margin: 0; background: ${colors.background}; } ::selection { background: ${colors.primary}; color: white; } a { color: ${colors.primary}; text-decoration-skip: ink; transition: 300ms all ease-in-out; } a:hover { color: ${colors.secondary}; } a::selection { color: white; } #nprogress { pointer-events: none; } #nprogress .bar { background: ${colors.primary}; position: fixed; z-index: 1031; top: 0; left: 0; width: 100%; 
height: 0.2rem; } #nprogress { pointer-events: none; } #nprogress .bar { background: ${colors.primary}; position: fixed; z-index: 1031; top: 0; left: 0; width: 100%; height: 0.2rem; } #nprogress .peg { display: block; position: absolute; right: 0; width: 10rem; height: 100%; box-shadow: 0 0 1rem ${colors.primary}, 0 0 0.5rem ${colors.primary}; opacity: 1; transform: rotate(3deg) translate(0, -0.4rem); } .nprogress-custom-parent { overflow: hidden; position: relative; } .nprogress-custom-parent #nprogress .spinner, .nprogress-custom-parent #nprogress .bar { position: absolute; } `}</style> </Fragment> </ThemeProvider> ); } } Document.propTypes = { children: PropTypes.node.isRequired }; export default withIntl(Document);
components/layouts/Document.js
import React, { Component, Fragment } from 'react'; import Router from 'next/router'; import NProgress from 'nprogress'; import PropTypes from 'prop-types'; import { ThemeProvider } from 'styled-components'; import Navigation from '../Navigation'; import colors from '../../theme'; import Footer from '../Footer'; import { version } from '../../package.json'; import { initGA, logPageView } from '../../lib/analytics'; import withIntl from './withIntl'; NProgress.configure({ showSpinner: false }); Router.onRouteChangeStart = () => NProgress.start(); Router.onRouteChangeComplete = () => NProgress.done(); Router.onRouteChangeError = () => NProgress.done(); if (global.document) { const info = [ `Version: ${version}`, 'You can find the code here: https://github.com/mcansh/blog', 'Thanks for stopping by ��', ]; // eslint-disable-next-line no-console info.forEach(message => console.log(message)); } class Document extends Component { componentDidMount() { if (process.env.NODE_ENV === 'production' && !window.GA_INITIALIZED) { initGA(); window.GA_INITIALIZED = true; } logPageView(); } render() { const { children } = this.props; return ( <ThemeProvider theme={colors}> <Fragment> <Navigation /> {children} <Footer /> <style jsx global>{` * { box-sizing: border-box; margin: 0; } html { font-size: 10px; } body { font-family: system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue', sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol'; font-weight: 400; margin: 0; background: ${colors.background}; } ::selection { background: ${colors.primary}; color: white; } a { color: ${colors.primary}; text-decoration-skip: ink; transition: 300ms all ease-in-out; } a:hover { color: ${colors.secondary}; } a::selection { color: white; } #nprogress { pointer-events: none; } #nprogress .bar { background: ${colors.primary}; position: fixed; z-index: 1031; top: 0; left: 0; width: 100%; height: 0.2rem; 
} #nprogress { pointer-events: none; } #nprogress .bar { background: ${colors.primary}; position: fixed; z-index: 1031; top: 0; left: 0; width: 100%; height: 0.2rem; } #nprogress .peg { display: block; position: absolute; right: 0; width: 10rem; height: 100%; box-shadow: 0 0 1rem ${colors.primary}, 0 0 0.5rem ${colors.primary}; opacity: 1; transform: rotate(3deg) translate(0, -0.4rem); } .nprogress-custom-parent { overflow: hidden; position: relative; } .nprogress-custom-parent #nprogress .spinner, .nprogress-custom-parent #nprogress .bar { position: absolute; } `}</style> </Fragment> </ThemeProvider> ); } } Document.propTypes = { children: PropTypes.node.isRequired }; export default withIntl(Document);
pull repository from package.json Signed-off-by: Logan McAnsh <[email protected]>
components/layouts/Document.js
pull repository from package.json
<ide><path>omponents/layouts/Document.js <ide> import Navigation from '../Navigation'; <ide> import colors from '../../theme'; <ide> import Footer from '../Footer'; <del>import { version } from '../../package.json'; <add>import { version, repository } from '../../package.json'; <ide> import { initGA, logPageView } from '../../lib/analytics'; <ide> import withIntl from './withIntl'; <ide> <ide> if (global.document) { <ide> const info = [ <ide> `Version: ${version}`, <del> 'You can find the code here: https://github.com/mcansh/blog', <add> `You can find the code here: https://github.com/${repository}`, <ide> 'Thanks for stopping by ��', <ide> ]; <ide> // eslint-disable-next-line no-console
Java
mit
e295f084426ad40f6212ab602778a6de26156cd6
0
nls-oskari/oskari-server,nls-oskari/oskari-server,nls-oskari/oskari-server
package fi.nls.oskari.control.statistics; import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.net.HttpURLConnection; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.geotools.GML; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.data.simple.SimpleFeatureIterator; import org.geotools.feature.DefaultFeatureCollection; import org.geotools.feature.FeatureIterator; import org.geotools.geojson.feature.FeatureJSON; import org.geotools.geometry.jts.JTS; import org.geotools.referencing.CRS; import org.json.JSONException; import org.json.JSONObject; import org.opengis.feature.simple.SimpleFeature; import org.opengis.geometry.MismatchedDimensionException; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.MathTransform; import org.opengis.referencing.operation.TransformException; import org.oskari.geojson.GeoJSON; import org.oskari.geojson.GeoJSONWriter; import com.vividsolutions.jts.geom.Geometry; import fi.nls.oskari.control.statistics.db.RegionSet; import fi.nls.oskari.domain.geo.Point; import fi.nls.oskari.log.LogFactory; import fi.nls.oskari.log.Logger; import fi.nls.oskari.service.ServiceException; import fi.nls.oskari.service.ServiceRuntimeException; import fi.nls.oskari.util.IOHelper; import fi.nls.oskari.util.JSONHelper; public class RegionSetHelper { private static final Logger LOG = LogFactory.getLogger(RegionSetService.class); private static final String RESOURCES_URL_PREFIX = "resources://"; private static final FeatureJSON FJ = new FeatureJSON(); public static List<Region> getRegions(RegionSet regionset, String requestedSRS) throws FactoryException, MismatchedDimensionException, TransformException, ServiceException, IOException { SimpleFeatureCollection fc = getFeatureCollection(regionset, requestedSRS); final String propId 
= regionset.getIdProperty(); final String propName = regionset.getNameProperty(); return parse(fc, propId, propName); } protected static SimpleFeatureCollection getFeatureCollection(RegionSet regionset, String requestedSRS) throws FactoryException, MismatchedDimensionException, TransformException, ServiceException, IOException { String url = regionset.getFeaturesUrl(); if (url.startsWith(RESOURCES_URL_PREFIX)) { return getRegionsResources(regionset, requestedSRS); } else { return getRegionsWFS(regionset, requestedSRS); } } protected static SimpleFeatureCollection getRegionsResources(RegionSet regionset, String requestedSRS) throws IOException, MismatchedDimensionException, TransformException, FactoryException { String url = regionset.getFeaturesUrl(); String path = url.substring(RESOURCES_URL_PREFIX.length()); if (path.toLowerCase().endsWith(".json")) { return getRegionsResourcesGeoJSON(regionset, requestedSRS, path); } throw new IllegalArgumentException("Invalid resource file format"); } /** * Read (Simple)FeatureCollection from GeoJSON resource file * transforming geometries to the requestedSRS */ protected static SimpleFeatureCollection getRegionsResourcesGeoJSON(RegionSet regionset, String requestedSRS, String path) throws IOException, MismatchedDimensionException, TransformException, FactoryException { MathTransform transform = findMathTransform(regionset.getSrs_name(), requestedSRS); LOG.debug("Trying to read GeoJSON resource file from:", path); DefaultFeatureCollection fc = new DefaultFeatureCollection(); try (InputStream in = RegionSetHelper.class.getClassLoader().getResourceAsStream(path)) { if (in == null) { LOG.warn("Could not find resource for path:", path); throw new NullPointerException(); } try (FeatureIterator<SimpleFeature> it = FJ.streamFeatureCollection(in)) { while (it.hasNext()) { SimpleFeature f = it.next(); transform(f, transform); fc.add(f); } } } return fc; } protected static MathTransform findMathTransform(String from, String to) throws 
FactoryException { if (from.equals(to)) { return null; } CoordinateReferenceSystem sourceCRS = CRS.decode(from); CoordinateReferenceSystem targetCRS = CRS.decode(to); return CRS.findMathTransform(sourceCRS, targetCRS, true); } protected static void transform(SimpleFeature f, MathTransform transform) throws MismatchedDimensionException, TransformException { if (transform != null) { Object geometry = f.getDefaultGeometry(); if (geometry != null && geometry instanceof Geometry) { JTS.transform((Geometry) geometry, transform); } } } protected static SimpleFeatureCollection getRegionsWFS(RegionSet regionset, String requestedSRS) throws ServiceException, IOException { // For example: http://localhost:8080/geoserver/wfs?service=wfs&version=1.1.0&request=GetFeature&typeNames=oskari:kunnat2013 Map<String, String> params = new HashMap<>(); params.put("service", "wfs"); params.put("version", "1.1.0"); params.put("request", "GetFeature"); params.put("typeName", regionset.getName()); params.put("srsName", requestedSRS); final String url = IOHelper.constructUrl(regionset.getFeaturesUrl(), params); final HttpURLConnection connection = IOHelper.getConnection(url); try (InputStream in = new BufferedInputStream(connection.getInputStream())) { return parseGMLFeatureCollection(in); } } protected static SimpleFeatureCollection parseGMLFeatureCollection(InputStream inputStream) { try { GML gml = new GML(GML.Version.GML3); return gml.decodeFeatureCollection(inputStream); } catch (Exception ex) { throw new ServiceRuntimeException("Couldn't parse response to feature collection", ex); } } protected static List<Region> parse(SimpleFeatureCollection fc, String idProperty, String nameProperty) throws ServiceException { final SimpleFeatureIterator it = fc.features(); try { final List<Region> nameCodes = new ArrayList<>(); while (it.hasNext()) { final SimpleFeature feature = it.next(); final String id = (String) feature.getAttribute(idProperty); final String name = (String) 
feature.getAttribute(nameProperty); if (id == null || name == null) { LOG.warn("Couldn't find id (", idProperty, ") and/or name(", nameProperty, ") property for region. Properties are:", LOG.getAsString(feature.getProperties())); continue; } Region region = new Region(id, name); try { region.setPointOnSurface(getPointOnSurface(feature)); region.setGeojson(toGeoJSON((Geometry) feature.getDefaultGeometry(), id, name)); nameCodes.add(region); } catch (Exception ex) { LOG.warn("Region had invalid geometry:", region, "Error:", ex.getMessage()); } } if (nameCodes.isEmpty()) { throw new ServiceException("Empty result, check configuration for region id-property=" + idProperty + " and name-property=" + nameProperty); } return nameCodes; } finally { it.close(); } } protected static JSONObject toGeoJSON(Geometry geom, String id, String name) throws JSONException { JSONObject feature = new JSONObject(); JSONHelper.putValue(feature, GeoJSON.TYPE, GeoJSON.FEATURE); JSONObject geometry = new GeoJSONWriter().writeGeometry(geom); JSONHelper.putValue(feature, GeoJSON.GEOMETRY, geometry); JSONObject properties = new JSONObject(); JSONHelper.putValue(properties, Region.KEY_CODE, id); JSONHelper.putValue(properties, Region.KEY_NAME, name); JSONHelper.putValue(feature, GeoJSON.PROPERTIES, properties); return feature; } private static Point getPointOnSurface(SimpleFeature feature) { Geometry geometry = (Geometry)feature.getDefaultGeometry(); // " An interior point is guaranteed to lie in the interior of the Geometry, if it possible to // calculate such a point exactly. Otherwise, the point may lie on the boundary of the geometry." com.vividsolutions.jts.geom.Point pos = geometry.getInteriorPoint(); return new Point(pos.getX(), pos.getY()); } }
service-statistics-common/src/main/java/fi/nls/oskari/control/statistics/RegionSetHelper.java
package fi.nls.oskari.control.statistics; import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.net.HttpURLConnection; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.geotools.GML; import org.geotools.data.simple.SimpleFeatureCollection; import org.geotools.data.simple.SimpleFeatureIterator; import org.geotools.feature.DefaultFeatureCollection; import org.geotools.feature.FeatureIterator; import org.geotools.geojson.feature.FeatureJSON; import org.geotools.geometry.jts.JTS; import org.geotools.referencing.CRS; import org.json.JSONException; import org.json.JSONObject; import org.opengis.feature.simple.SimpleFeature; import org.opengis.geometry.MismatchedDimensionException; import org.opengis.referencing.FactoryException; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.MathTransform; import org.opengis.referencing.operation.TransformException; import org.oskari.geojson.GeoJSON; import org.oskari.geojson.GeoJSONWriter; import com.vividsolutions.jts.geom.Geometry; import fi.nls.oskari.control.statistics.db.RegionSet; import fi.nls.oskari.domain.geo.Point; import fi.nls.oskari.log.LogFactory; import fi.nls.oskari.log.Logger; import fi.nls.oskari.service.ServiceException; import fi.nls.oskari.service.ServiceRuntimeException; import fi.nls.oskari.util.IOHelper; import fi.nls.oskari.util.JSONHelper; public class RegionSetHelper { private static final Logger LOG = LogFactory.getLogger(RegionSetService.class); private static final String RESOURCES_URL_PREFIX = "resources://"; private static final FeatureJSON FJ = new FeatureJSON(); public static List<Region> getRegions(RegionSet regionset, String requestedSRS) throws FactoryException, MismatchedDimensionException, TransformException, ServiceException, IOException { SimpleFeatureCollection fc = getFeatureCollection(regionset, requestedSRS); final String propId 
= regionset.getIdProperty(); final String propName = regionset.getNameProperty(); return parse(fc, propId, propName); } protected static SimpleFeatureCollection getFeatureCollection(RegionSet regionset, String requestedSRS) throws FactoryException, MismatchedDimensionException, TransformException, ServiceException, IOException { String url = regionset.getFeaturesUrl(); if (url.startsWith(RESOURCES_URL_PREFIX)) { return getRegionsResources(regionset, requestedSRS); } else { return getRegionsWFS(regionset, requestedSRS); } } protected static SimpleFeatureCollection getRegionsResources(RegionSet regionset, String requestedSRS) throws IOException, MismatchedDimensionException, TransformException, FactoryException { String url = regionset.getFeaturesUrl(); String path = url.substring(RESOURCES_URL_PREFIX.length()); if (path.toLowerCase().endsWith(".json")) { return getRegionsResourcesGeoJSON(regionset, requestedSRS, path); } throw new IllegalArgumentException("Invalid resource file format"); } /** * Read (Simple)FeatureCollection from GeoJSON resource file * transforming geometries to the requestedSRS */ protected static SimpleFeatureCollection getRegionsResourcesGeoJSON(RegionSet regionset, String requestedSRS, String path) throws IOException, MismatchedDimensionException, TransformException, FactoryException { MathTransform transform = findMathTransform(regionset.getSrs_name(), requestedSRS); LOG.debug("Trying to read GeoJSON resource file from:", path); DefaultFeatureCollection fc = new DefaultFeatureCollection(); try (InputStream in = RegionSetHelper.class.getResourceAsStream(path)) { if (in == null) { LOG.warn("Could not find resource for path:", path); throw new NullPointerException(); } try (FeatureIterator<SimpleFeature> it = FJ.streamFeatureCollection(in)) { while (it.hasNext()) { SimpleFeature f = it.next(); transform(f, transform); fc.add(f); } } } return fc; } protected static MathTransform findMathTransform(String from, String to) throws FactoryException { 
if (from.equals(to)) { return null; } CoordinateReferenceSystem sourceCRS = CRS.decode(from); CoordinateReferenceSystem targetCRS = CRS.decode(to); return CRS.findMathTransform(sourceCRS, targetCRS, true); } protected static void transform(SimpleFeature f, MathTransform transform) throws MismatchedDimensionException, TransformException { if (transform != null) { Object geometry = f.getDefaultGeometry(); if (geometry != null && geometry instanceof Geometry) { JTS.transform((Geometry) geometry, transform); } } } protected static SimpleFeatureCollection getRegionsWFS(RegionSet regionset, String requestedSRS) throws ServiceException, IOException { // For example: http://localhost:8080/geoserver/wfs?service=wfs&version=1.1.0&request=GetFeature&typeNames=oskari:kunnat2013 Map<String, String> params = new HashMap<>(); params.put("service", "wfs"); params.put("version", "1.1.0"); params.put("request", "GetFeature"); params.put("typeName", regionset.getName()); params.put("srsName", requestedSRS); final String url = IOHelper.constructUrl(regionset.getFeaturesUrl(), params); final HttpURLConnection connection = IOHelper.getConnection(url); try (InputStream in = new BufferedInputStream(connection.getInputStream())) { return parseGMLFeatureCollection(in); } } protected static SimpleFeatureCollection parseGMLFeatureCollection(InputStream inputStream) { try { GML gml = new GML(GML.Version.GML3); return gml.decodeFeatureCollection(inputStream); } catch (Exception ex) { throw new ServiceRuntimeException("Couldn't parse response to feature collection", ex); } } protected static List<Region> parse(SimpleFeatureCollection fc, String idProperty, String nameProperty) throws ServiceException { final SimpleFeatureIterator it = fc.features(); try { final List<Region> nameCodes = new ArrayList<>(); while (it.hasNext()) { final SimpleFeature feature = it.next(); final String id = (String) feature.getAttribute(idProperty); final String name = (String) feature.getAttribute(nameProperty); if 
(id == null || name == null) { LOG.warn("Couldn't find id (", idProperty, ") and/or name(", nameProperty, ") property for region. Properties are:", LOG.getAsString(feature.getProperties())); continue; } Region region = new Region(id, name); try { region.setPointOnSurface(getPointOnSurface(feature)); region.setGeojson(toGeoJSON((Geometry) feature.getDefaultGeometry(), id, name)); nameCodes.add(region); } catch (Exception ex) { LOG.warn("Region had invalid geometry:", region, "Error:", ex.getMessage()); } } if (nameCodes.isEmpty()) { throw new ServiceException("Empty result, check configuration for region id-property=" + idProperty + " and name-property=" + nameProperty); } return nameCodes; } finally { it.close(); } } protected static JSONObject toGeoJSON(Geometry geom, String id, String name) throws JSONException { JSONObject feature = new JSONObject(); JSONHelper.putValue(feature, GeoJSON.TYPE, GeoJSON.FEATURE); JSONObject geometry = new GeoJSONWriter().writeGeometry(geom); JSONHelper.putValue(feature, GeoJSON.GEOMETRY, geometry); JSONObject properties = new JSONObject(); JSONHelper.putValue(properties, Region.KEY_CODE, id); JSONHelper.putValue(properties, Region.KEY_NAME, name); JSONHelper.putValue(feature, GeoJSON.PROPERTIES, properties); return feature; } private static Point getPointOnSurface(SimpleFeature feature) { Geometry geometry = (Geometry)feature.getDefaultGeometry(); // " An interior point is guaranteed to lie in the interior of the Geometry, if it possible to // calculate such a point exactly. Otherwise, the point may lie on the boundary of the geometry." com.vividsolutions.jts.geom.Point pos = geometry.getInteriorPoint(); return new Point(pos.getX(), pos.getY()); } }
Try classloader
service-statistics-common/src/main/java/fi/nls/oskari/control/statistics/RegionSetHelper.java
Try classloader
<ide><path>ervice-statistics-common/src/main/java/fi/nls/oskari/control/statistics/RegionSetHelper.java <ide> MathTransform transform = findMathTransform(regionset.getSrs_name(), requestedSRS); <ide> LOG.debug("Trying to read GeoJSON resource file from:", path); <ide> DefaultFeatureCollection fc = new DefaultFeatureCollection(); <del> try (InputStream in = RegionSetHelper.class.getResourceAsStream(path)) { <add> try (InputStream in = RegionSetHelper.class.getClassLoader().getResourceAsStream(path)) { <ide> if (in == null) { <ide> LOG.warn("Could not find resource for path:", path); <ide> throw new NullPointerException();
Java
apache-2.0
933aa6f5406455befd313e35f3d4e1a04713e378
0
marssa/footprint
package mise.marssa.footprint.datatypes.composite; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.Table; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; import mise.marssa.footprint.datatypes.MString; import mise.marssa.footprint.datatypes.TypeFactory; import mise.marssa.footprint.logger.MMarker; import org.hibernate.annotations.GenericGenerator; import org.slf4j.LoggerFactory; import ch.qos.logback.classic.Logger; import flexjson.JSON; import flexjson.JSONSerializer; /** * @author Alan Grech * @version 1.0 * @created 08-Jul-2011 09:53:24 */ @XmlType(name = "Coordinate", factoryClass = TypeFactory.class, factoryMethod = "getCoordinateInstance") @Entity @Table(name="Coordinate") public class Coordinate { private static Logger Coordinate = (Logger) LoggerFactory.getLogger("Coordinate"); @Column(name="Latitude") private Latitude latitude; @Column(name="Latitude") private Longitude longitude; public Coordinate(Latitude latitude, Longitude longitude) { this.latitude = latitude; this.longitude = longitude; Coordinate.trace(MMarker.CONSTRUCTOR,"Constructor created with Latitude:\"{}\", Longitude:\"{}\"", latitude.toString(),longitude.toString()); } public void finalize() throws Throwable { } @JSON @XmlElement public Latitude getLatitude() { Coordinate.trace(MMarker.GETTER,"Getting Latitude: {}.",latitude.toString()); return latitude; } @JSON @XmlElement public Longitude getLongitude() { Coordinate.trace(MMarker.GETTER,"Getting Longitude: {}.",longitude.toString()); return longitude; } @Id @Column(name = "id") @GeneratedValue(generator="increment") @GenericGenerator(name="increment", strategy = "increment") Long id; public Long getId() { return id; } private void setId(Long id) { this.id = id; } public java.lang.String toString() { Coordinate.trace(MMarker.GETTER,"Getting Coordinate as a String"); return "[" + 
latitude.toString() + ", " + longitude.toString() + "]"; } public MString toJSON(){ MString JSON = new MString(new JSONSerializer().deepSerialize(this)); return JSON; } }
src/main/java/mise/marssa/footprint/datatypes/composite/Coordinate.java
package mise.marssa.footprint.datatypes.composite; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; import org.slf4j.LoggerFactory; import ch.qos.logback.classic.Logger; import mise.marssa.footprint.datatypes.MString; import mise.marssa.footprint.datatypes.TypeFactory; import mise.marssa.footprint.logger.MMarker; import flexjson.JSON; import flexjson.JSONSerializer; /** * @author Alan Grech * @version 1.0 * @created 08-Jul-2011 09:53:24 */ @XmlType(name = "Coordinate", factoryClass = TypeFactory.class, factoryMethod = "getCoordinateInstance") public class Coordinate { private static Logger Coordinate = (Logger) LoggerFactory.getLogger("Coordinate"); private Latitude latitude; private Longitude longitude; public Coordinate(Latitude latitude, Longitude longitude) { this.latitude = latitude; this.longitude = longitude; Coordinate.trace(MMarker.CONSTRUCTOR,"Constructor created with Latitude:\"{}\", Longitude:\"{}\"", latitude.toString(),longitude.toString()); } public void finalize() throws Throwable { } @JSON @XmlElement public Latitude getLatitude() { Coordinate.trace(MMarker.GETTER,"Getting Latitude: {}.",latitude.toString()); return latitude; } @JSON @XmlElement public Longitude getLongitude() { Coordinate.trace(MMarker.GETTER,"Getting Longitude: {}.",longitude.toString()); return longitude; } public java.lang.String toString() { Coordinate.trace(MMarker.GETTER,"Getting Coordinate as a String"); return "[" + latitude.toString() + ", " + longitude.toString() + "]"; } public MString toJSON(){ MString JSON = new MString(new JSONSerializer().deepSerialize(this)); return JSON; } }
Annotations for Coordinate.java & Latitude.java
src/main/java/mise/marssa/footprint/datatypes/composite/Coordinate.java
Annotations for Coordinate.java & Latitude.java
<ide><path>rc/main/java/mise/marssa/footprint/datatypes/composite/Coordinate.java <ide> package mise.marssa.footprint.datatypes.composite; <ide> <add>import javax.persistence.Column; <add>import javax.persistence.Entity; <add>import javax.persistence.GeneratedValue; <add>import javax.persistence.Id; <add>import javax.persistence.Table; <ide> import javax.xml.bind.annotation.XmlElement; <ide> import javax.xml.bind.annotation.XmlType; <del> <del>import org.slf4j.LoggerFactory; <del> <del>import ch.qos.logback.classic.Logger; <ide> <ide> import mise.marssa.footprint.datatypes.MString; <ide> import mise.marssa.footprint.datatypes.TypeFactory; <ide> import mise.marssa.footprint.logger.MMarker; <add> <add>import org.hibernate.annotations.GenericGenerator; <add>import org.slf4j.LoggerFactory; <add> <add>import ch.qos.logback.classic.Logger; <ide> import flexjson.JSON; <ide> import flexjson.JSONSerializer; <ide> <ide> * @created 08-Jul-2011 09:53:24 <ide> */ <ide> @XmlType(name = "Coordinate", factoryClass = TypeFactory.class, factoryMethod = "getCoordinateInstance") <add>@Entity <add>@Table(name="Coordinate") <ide> public class Coordinate { <ide> <ide> private static Logger Coordinate = (Logger) LoggerFactory.getLogger("Coordinate"); <add> @Column(name="Latitude") <ide> private Latitude latitude; <add> @Column(name="Latitude") <ide> private Longitude longitude; <ide> <ide> public Coordinate(Latitude latitude, Longitude longitude) { <ide> Coordinate.trace(MMarker.GETTER,"Getting Latitude: {}.",latitude.toString()); <ide> return latitude; <ide> } <add> <ide> @JSON <ide> @XmlElement <ide> public Longitude getLongitude() { <ide> Coordinate.trace(MMarker.GETTER,"Getting Longitude: {}.",longitude.toString()); <ide> return longitude; <add> } <add> <add> @Id <add> @Column(name = "id") <add> @GeneratedValue(generator="increment") <add> @GenericGenerator(name="increment", strategy = "increment") <add> Long id; <add> <add> public Long getId() { <add> return id; <add> } <add> <add> 
private void setId(Long id) { <add> this.id = id; <ide> } <ide> <ide> public java.lang.String toString() {
Java
mit
4cc7e194481b05634c9c8f511ebab3f51bcd0610
0
jenkinsci/tikal-multijob-plugin,Brantone/tikal-multijob-plugin,sshelomentsev/tikal-multijob-plugin,oleg-nenashev/tikal-multijob-plugin,arpitgold/tikal-multijob-plugin,Brantone/tikal-multijob-plugin,jenkinsci/tikal-multijob-plugin,cohencil/tikal-multijob-plugin,oleg-nenashev/tikal-multijob-plugin,sschuberth/tikal-multijob-plugin,harcher81/tikal-multijob-plugin,arpitgold/tikal-multijob-plugin,sschuberth/tikal-multijob-plugin,rrialq/tikal-multijob-plugin,cohencil/tikal-multijob-plugin,sshelomentsev/tikal-multijob-plugin,harcher81/tikal-multijob-plugin,rrialq/tikal-multijob-plugin
package com.tikal.jenkins.plugins.multijob; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; import hudson.Util; import hudson.console.HyperlinkNote; import hudson.model.Action; import hudson.model.BallColor; import hudson.model.Build; import hudson.model.BuildListener; import hudson.model.DependecyDeclarer; import hudson.model.DependencyGraph; import hudson.model.DependencyGraph.Dependency; import hudson.model.Item; import hudson.model.Result; import hudson.model.TaskListener; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.model.Cause.UpstreamCause; import hudson.model.Run; import hudson.model.queue.QueueTaskFuture; import hudson.scm.ChangeLogSet; import hudson.scm.ChangeLogSet.Entry; import hudson.scm.SCM; import hudson.scm.SCMRevisionState; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.Builder; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import net.sf.json.JSONObject; import jenkins.model.Jenkins; import org.jenkinsci.lib.envinject.EnvInjectLogger; import org.jenkinsci.plugins.envinject.EnvInjectBuilderContributionAction; import org.jenkinsci.plugins.envinject.EnvInjectBuilder; import org.jenkinsci.plugins.envinject.service.EnvInjectActionSetter; import org.jenkinsci.plugins.envinject.service.EnvInjectEnvVars; import org.jenkinsci.plugins.envinject.service.EnvInjectVariableGetter; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.StaplerRequest; import com.tikal.jenkins.plugins.multijob.MultiJobBuild.SubBuild; 
import com.tikal.jenkins.plugins.multijob.PhaseJobsConfig.KillPhaseOnJobResultCondition; public class MultiJobBuilder extends Builder implements DependecyDeclarer { /** * The name of the parameter in the build.getBuildVariables() to enable the job build, regardless * of scm changes. */ public static final String BUILD_ALWAYS_KEY = "hudson.scm.multijob.build.always"; /** * List of messages to show to show by console. */ private static final String[] TRIGGER_MESSAGES = { " >> [%s] has changes since last build. Adding to build queue.", " >> [%s] has no changes since last build, but it will be adding to build queue.", " >> [%s] has no changes since last build, but you have enabled the 'build always' function. Adding to build queue.", " >> [%s] has no changes since last build, so it will be skipped." }; private String phaseName; private List<PhaseJobsConfig> phaseJobs; private ContinuationCondition continuationCondition = ContinuationCondition.SUCCESSFUL; @DataBoundConstructor public MultiJobBuilder(String phaseName, List<PhaseJobsConfig> phaseJobs, ContinuationCondition continuationCondition) { this.phaseName = phaseName; this.phaseJobs = Util.fixNull(phaseJobs); this.continuationCondition = continuationCondition; } @Override @SuppressWarnings({ "rawtypes", "unchecked" }) public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { Jenkins jenkins = Jenkins.getInstance(); MultiJobBuild multiJobBuild = (MultiJobBuild) build; MultiJobProject thisProject = multiJobBuild.getProject(); Map<PhaseSubJob, PhaseJobsConfig> phaseSubJobs = new HashMap<PhaseSubJob, PhaseJobsConfig>( phaseJobs.size()); for (PhaseJobsConfig phaseJobConfig : phaseJobs) { Item item = jenkins.getItemByFullName(phaseJobConfig.getJobName()); if (item instanceof AbstractProject) { AbstractProject job = (AbstractProject) item; phaseSubJobs.put(new PhaseSubJob(job), phaseJobConfig); } } List<SubTask> subTasks = new 
ArrayList<SubTask>(); for (PhaseSubJob phaseSubJob : phaseSubJobs.keySet()) { AbstractProject subJob = phaseSubJob.job; if (subJob.isDisabled()) { listener.getLogger().println( String.format( "Skipping %s. This Job has been disabled.", subJob.getName())); continue; } /* Hope this works!!! If the build contains the variable named hudson.scm.multijob.force.build.always with a value of "true", we force the build always. Useful to overwrite the subJob.poll(...).hasChanges() value. When to build VALUES hasChanges Y Y Y Y N N N N buildOnly Y Y N N Y Y N N buildAlways Y N Y N Y N Y N --------------------------------------------------------------------------- Build Y Y Y Y Y N Y Y hasChanges Y ==> build ==> Y N buildAlways N ==> build ==> N otherwise ==> Y -------------------------- If the job has SCM changes then message = 0 ==> Add to queue If the job has no SCM changes: If !buildOnlyIfSCMChanges, then message = 1 ==> No buildOnly, add to queue If buildAlways, then message = 2 ==> No SCM changes, but forced to build. If !buildAlways, then message = 3 ==> No SCM changes, not forced to build. Skipped. */ final SCM scm = subJob.getScm(); final SCMRevisionState scmRS = scm.calcRevisionsFromBuild((AbstractBuild) subJob.getLastBuild(), launcher, listener); final boolean hasChanges = scm.poll(subJob, launcher, subJob.getWorkspace(), listener, scmRS).hasChanges(); final PhaseJobsConfig phaseConfig = phaseSubJobs.get(phaseSubJob); final boolean buildOnlyIfSCMChanges = phaseConfig.isBuildOnlyIfSCMChanges(); final boolean buildAlways = Boolean.valueOf(build.getBuildVariables().get(BUILD_ALWAYS_KEY)); final int message = (hasChanges) ? 0 : (!buildOnlyIfSCMChanges ? 1 : ((buildAlways) ? 
2 : 3) ); listener.getLogger().println(String.format(TRIGGER_MESSAGES[message], subJob.getName())); if (message == 3) { continue; } reportStart(listener, subJob); List<Action> actions = new ArrayList<Action>(); prepareActions(multiJobBuild, subJob, phaseConfig, listener, actions); while (subJob.isInQueue()) { TimeUnit.SECONDS.sleep(subJob.getQuietPeriod()); } Future<AbstractBuild> future = null; if (!phaseConfig.isDisableJob()) { future = subJob.scheduleBuild2(subJob.getQuietPeriod(), new UpstreamCause((Run) multiJobBuild), actions.toArray(new Action[0])); } if (future != null) { subTasks.add(new SubTask(future, phaseConfig)); } else { listener.getLogger().println( String.format("Warning: %s sub job is disabled.", subJob.getName())); } } if (subTasks.size() < 1) return true; ExecutorService executor = Executors .newFixedThreadPool(subTasks.size()); Set<Result> jobResults = new HashSet<Result>(); BlockingQueue<SubTask> queue = new ArrayBlockingQueue<SubTask>( subTasks.size()); for (SubTask subTask : subTasks) { Runnable worker = new SubJobWorker(thisProject, multiJobBuild, listener, subTask, queue); executor.execute(worker); } executor.shutdown(); int resultCounter = 0; while (!executor.isTerminated()) { SubTask subTask = queue.take(); resultCounter++; if (subTask.result != null) { jobResults.add(subTask.result); checkPhaseTermination(subTask, subTasks); } if (subTasks.size() == resultCounter) break; } executor.shutdownNow(); for (Result result : jobResults) { if (!continuationCondition.isContinue(result)) { return false; } } return true; } private final class SubJobWorker extends Thread { MultiJobProject multiJobProject; MultiJobBuild multiJobBuild; BuildListener listener; SubTask subTask; BlockingQueue<SubTask> queue; SubJobWorker(MultiJobProject multiJobProject, MultiJobBuild multiJobBuild, BuildListener listener, SubTask subTask, BlockingQueue<SubTask> queue) { this.multiJobBuild = multiJobBuild; this.multiJobProject = multiJobProject; this.listener = listener; 
this.subTask = subTask; this.queue = queue; } public void run() { Result result = null; AbstractBuild jobBuild = null; try { QueueTaskFuture<AbstractBuild> future = (QueueTaskFuture<AbstractBuild>) subTask.future; while (true) { if (future.isCancelled() && jobBuild == null) { break; } try { jobBuild = future.getStartCondition().get(5, TimeUnit.SECONDS); updateSubBuild(multiJobBuild, multiJobProject, jobBuild); } catch (Exception e) { if (e instanceof TimeoutException) continue; else { throw e; } } if (future.isDone()) break; try { Thread.sleep(1000); } catch (InterruptedException e) { future.cancel(true); throw new InterruptedException(); } } if (jobBuild != null) { result = jobBuild.getResult(); updateSubBuild(multiJobBuild, multiJobProject, jobBuild, result); ChangeLogSet<Entry> changeLogSet = jobBuild.getChangeSet(); multiJobBuild.addChangeLogSet(changeLogSet); reportFinish(listener, jobBuild, result); addBuildEnvironmentVariables(multiJobBuild, jobBuild, listener); subTask.result = result; } } catch (Exception e) { e.printStackTrace(); } if (jobBuild == null) { updateSubBuild(multiJobBuild, multiJobProject, subTask.phaseConfig); } queue.add(subTask); } } boolean checkPhaseTermination(SubTask subTask, List<SubTask> subTasks) { try { KillPhaseOnJobResultCondition killCondition = subTask.phaseConfig .getKillPhaseOnJobResultCondition(); if (killCondition.equals(KillPhaseOnJobResultCondition.NEVER)) return false; if (killCondition.isKillPhase(subTask.result)) { for (SubTask _subTask : subTasks) _subTask.future.cancel(true); } return true; } catch (Exception e) { return false; } } private static final class SubTask { Future<AbstractBuild> future; PhaseJobsConfig phaseConfig; Result result; SubTask(Future<AbstractBuild> future, PhaseJobsConfig phaseConfig) { this.future = future; this.phaseConfig = phaseConfig; } } private void reportStart(BuildListener listener, AbstractProject subJob) { listener.getLogger().printf( "Starting build job %s.\n", 
HyperlinkNote.encodeTo('/' + subJob.getUrl(), subJob.getFullName())); } private void reportFinish(BuildListener listener, AbstractBuild jobBuild, Result result) { listener.getLogger().println( "Finished Build : " + HyperlinkNote.encodeTo("/" + jobBuild.getUrl() + "/", String.valueOf(jobBuild.getDisplayName())) + " of Job : " + HyperlinkNote.encodeTo('/' + jobBuild.getProject() .getUrl(), jobBuild.getProject().getFullName()) + " with status :" + HyperlinkNote.encodeTo('/' + jobBuild.getUrl() + "/console", result.toString())); } private void updateSubBuild(MultiJobBuild multiJobBuild, MultiJobProject multiJobProject, PhaseJobsConfig phaseConfig) { SubBuild subBuild = new SubBuild(multiJobProject.getName(), multiJobBuild.getNumber(), phaseConfig.getJobName(), 0, phaseName, null, BallColor.NOTBUILT.getImage(), "not built", ""); multiJobBuild.addSubBuild(subBuild); } private void updateSubBuild(MultiJobBuild multiJobBuild, MultiJobProject multiJobProject, AbstractBuild jobBuild) { SubBuild subBuild = new SubBuild(multiJobProject.getName(), multiJobBuild.getNumber(), jobBuild.getProject().getName(), jobBuild.getNumber(), phaseName, null, jobBuild.getIconColor() .getImage(), jobBuild.getDurationString(), jobBuild.getUrl()); multiJobBuild.addSubBuild(subBuild); } private void updateSubBuild(MultiJobBuild multiJobBuild, MultiJobProject multiJobProject, AbstractBuild jobBuild, Result result) { SubBuild subBuild = new SubBuild(multiJobProject.getName(), multiJobBuild.getNumber(), jobBuild.getProject().getName(), jobBuild.getNumber(), phaseName, result, jobBuild .getIconColor().getImage(), jobBuild.getDurationString(), jobBuild.getUrl()); multiJobBuild.addSubBuild(subBuild); } @SuppressWarnings("rawtypes") private void addBuildEnvironmentVariables(MultiJobBuild thisBuild, AbstractBuild jobBuild, BuildListener listener) { // Env variables map Map<String, String> variables = new HashMap<String, String>(); String jobName = jobBuild.getProject().getName(); String jobNameSafe = 
jobName.replaceAll("[^A-Za-z0-9]", "_") .toUpperCase(); String buildNumber = Integer.toString(jobBuild.getNumber()); String buildResult = jobBuild.getResult().toString(); // These will always reference the last build variables.put("LAST_TRIGGERED_JOB_NAME", jobName); variables.put(jobNameSafe + "_BUILD_NUMBER", buildNumber); variables.put(jobNameSafe + "_BUILD_RESULT", buildResult); if (variables.get("TRIGGERED_JOB_NAMES") == null) { variables.put("TRIGGERED_JOB_NAMES", jobName); } else { String triggeredJobNames = variables.get("TRIGGERED_JOB_NAMES") + "," + jobName; variables.put("TRIGGERED_JOB_NAMES", triggeredJobNames); } if (variables.get("TRIGGERED_BUILD_RUN_COUNT_" + jobNameSafe) == null) { variables.put("TRIGGERED_BUILD_RUN_COUNT_" + jobNameSafe, "1"); } else { String runCount = Integer.toString(Integer.parseInt(variables .get("TRIGGERED_BUILD_RUN_COUNT_" + jobNameSafe)) + 1); variables.put("TRIGGERED_BUILD_RUN_COUNT_" + jobNameSafe, runCount); } // Set the new build variables map injectEnvVars(thisBuild, listener, variables); } /** * Method for properly injecting environment variables via EnvInject plugin. 
* Method based off logic in {@link EnvInjectBuilder#perform} */ private void injectEnvVars(AbstractBuild<?, ?> build, BuildListener listener, Map<String, String> incomingVars) { EnvInjectLogger logger = new EnvInjectLogger(listener); FilePath ws = build.getWorkspace(); EnvInjectActionSetter envInjectActionSetter = new EnvInjectActionSetter( ws); EnvInjectEnvVars envInjectEnvVarsService = new EnvInjectEnvVars(logger); try { EnvInjectVariableGetter variableGetter = new EnvInjectVariableGetter(); Map<String, String> previousEnvVars = variableGetter .getEnvVarsPreviousSteps(build, logger); // Get current envVars Map<String, String> variables = new HashMap<String, String>( previousEnvVars); // Resolve variables final Map<String, String> resultVariables = envInjectEnvVarsService .getMergedVariables(variables, incomingVars); // Set the new build variables map build.addAction(new EnvInjectBuilderContributionAction( resultVariables)); // Add or get the existing action to add new env vars envInjectActionSetter.addEnvVarsToEnvInjectBuildAction(build, resultVariables); } catch (Throwable throwable) { listener.getLogger() .println( "[MultiJob] - [ERROR] - Problems occurs on injecting env vars as a build step: " + throwable.getMessage()); } } @SuppressWarnings("rawtypes") private void prepareActions(AbstractBuild build, AbstractProject project, PhaseJobsConfig projectConfig, BuildListener listener, List<Action> actions) throws IOException, InterruptedException { List<Action> parametersActions = null; // if (projectConfig.hasProperties()) { parametersActions = (List<Action>) projectConfig.getActions(build, listener, project, projectConfig.isCurrParams()); actions.addAll(parametersActions); // } } public String getPhaseName() { return phaseName; } public void setPhaseName(String phaseName) { this.phaseName = phaseName; } public List<PhaseJobsConfig> getPhaseJobs() { return phaseJobs; } public void setPhaseJobs(List<PhaseJobsConfig> phaseJobs) { this.phaseJobs = phaseJobs; } public 
boolean phaseNameExist(String phaseName) { for (PhaseJobsConfig phaseJob : phaseJobs) { if (phaseJob.getDisplayName().equals(phaseName)) { return true; } } return false; } private final static class PhaseSubJob { AbstractProject job; PhaseSubJob(AbstractProject job) { this.job = job; } } @Extension public static class DescriptorImpl extends BuildStepDescriptor<Builder> { @SuppressWarnings("rawtypes") @Override public boolean isApplicable(Class<? extends AbstractProject> jobType) { return jobType.equals(MultiJobProject.class); } @Override public String getDisplayName() { return "MultiJob Phase"; } @Override public Builder newInstance(StaplerRequest req, JSONObject formData) throws FormException { return req.bindJSON(MultiJobBuilder.class, formData); } @Override public boolean configure(StaplerRequest req, JSONObject formData) { save(); return true; } } @SuppressWarnings("rawtypes") public void buildDependencyGraph(AbstractProject owner, DependencyGraph graph) { Jenkins jenkins = Jenkins.getInstance(); List<PhaseJobsConfig> phaseJobsConfigs = getPhaseJobs(); if (phaseJobsConfigs == null) return; for (PhaseJobsConfig project : phaseJobsConfigs) { Item topLevelItem = jenkins.getItemByFullName(project.getJobName()); if (topLevelItem instanceof AbstractProject) { Dependency dependency = new Dependency(owner, (AbstractProject) topLevelItem) { @Override public boolean shouldTriggerBuild(AbstractBuild build, TaskListener listener, List<Action> actions) { return false; } }; graph.addDependency(dependency); } } } public boolean onJobRenamed(String oldName, String newName) { boolean changed = false; for (Iterator i = phaseJobs.iterator(); i.hasNext();) { PhaseJobsConfig phaseJobs = (PhaseJobsConfig) i.next(); String jobName = phaseJobs.getJobName(); if (jobName.trim().equals(oldName)) { if (newName != null) { phaseJobs.setJobName(newName); changed = true; } else { i.remove(); changed = true; } } } return changed; } public boolean onJobDeleted(String oldName) { return 
onJobRenamed(oldName, null); } public static enum ContinuationCondition { SUCCESSFUL("Successful") { @Override public boolean isContinue(Result result) { return result.equals(Result.SUCCESS); } }, UNSTABLE("Stable or Unstable but not Failed") { @Override public boolean isContinue(Result result) { return result.isBetterOrEqualTo(Result.UNSTABLE); } }, COMPLETED("Complete (always continue)") { @Override public boolean isContinue(Result result) { return result.equals(Result.ABORTED) ? true : result .isBetterOrEqualTo(Result.FAILURE); } }, FAILURE("Failed") { @Override public boolean isContinue(Result result) { return result.equals(Result.ABORTED) || result.isBetterOrEqualTo(Result.FAILURE); } }; abstract public boolean isContinue(Result result); private ContinuationCondition(String label) { this.label = label; } final private String label; public String getLabel() { return label; } } public ContinuationCondition getContinuationCondition() { return continuationCondition; } public void setContinuationCondition( ContinuationCondition continuationCondition) { this.continuationCondition = continuationCondition; } }
src/main/java/com/tikal/jenkins/plugins/multijob/MultiJobBuilder.java
package com.tikal.jenkins.plugins.multijob; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; import hudson.Util; import hudson.console.HyperlinkNote; import hudson.model.Action; import hudson.model.BallColor; import hudson.model.Build; import hudson.model.BuildListener; import hudson.model.DependecyDeclarer; import hudson.model.DependencyGraph; import hudson.model.DependencyGraph.Dependency; import hudson.model.Item; import hudson.model.Result; import hudson.model.TaskListener; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.model.Cause.UpstreamCause; import hudson.model.Run; import hudson.model.queue.QueueTaskFuture; import hudson.scm.ChangeLogSet; import hudson.scm.ChangeLogSet.Entry; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.Builder; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import net.sf.json.JSONObject; import jenkins.model.Jenkins; import org.jenkinsci.lib.envinject.EnvInjectLogger; import org.jenkinsci.plugins.envinject.EnvInjectBuilderContributionAction; import org.jenkinsci.plugins.envinject.EnvInjectBuilder; import org.jenkinsci.plugins.envinject.service.EnvInjectActionSetter; import org.jenkinsci.plugins.envinject.service.EnvInjectEnvVars; import org.jenkinsci.plugins.envinject.service.EnvInjectVariableGetter; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.StaplerRequest; import com.tikal.jenkins.plugins.multijob.MultiJobBuild.SubBuild; import 
com.tikal.jenkins.plugins.multijob.PhaseJobsConfig.KillPhaseOnJobResultCondition; public class MultiJobBuilder extends Builder implements DependecyDeclarer { /** * The name of the parameter in the build.getBuildVariables() to enable the job build, regardless * of scm changes. */ public static final String BUILD_ALWAYS_KEY = "hudson.scm.multijob.build.always"; /** * List of messages to show to show by console. */ private static final String[] TRIGGER_MESSAGES = { " >> [%s] has changes since last build. Adding to build queue.", " >> [%s] has no changes since last build, but it will be adding to build queue.", " >> [%s] has no changes since last build, but you have enabled the 'build always' function. Adding to build queue.", " >> [%s] has no changes since last build, so it will be skipped." }; private String phaseName; private List<PhaseJobsConfig> phaseJobs; private ContinuationCondition continuationCondition = ContinuationCondition.SUCCESSFUL; @DataBoundConstructor public MultiJobBuilder(String phaseName, List<PhaseJobsConfig> phaseJobs, ContinuationCondition continuationCondition) { this.phaseName = phaseName; this.phaseJobs = Util.fixNull(phaseJobs); this.continuationCondition = continuationCondition; } @Override @SuppressWarnings({ "rawtypes", "unchecked" }) public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { Jenkins jenkins = Jenkins.getInstance(); MultiJobBuild multiJobBuild = (MultiJobBuild) build; MultiJobProject thisProject = multiJobBuild.getProject(); Map<PhaseSubJob, PhaseJobsConfig> phaseSubJobs = new HashMap<PhaseSubJob, PhaseJobsConfig>( phaseJobs.size()); for (PhaseJobsConfig phaseJobConfig : phaseJobs) { Item item = jenkins.getItemByFullName(phaseJobConfig.getJobName()); if (item instanceof AbstractProject) { AbstractProject job = (AbstractProject) item; phaseSubJobs.put(new PhaseSubJob(job), phaseJobConfig); } } List<SubTask> subTasks = new ArrayList<SubTask>(); for 
(PhaseSubJob phaseSubJob : phaseSubJobs.keySet()) { AbstractProject subJob = phaseSubJob.job; if (subJob.isDisabled()) { listener.getLogger().println( String.format( "Skipping %s. This Job has been disabled.", subJob.getName())); continue; } /* Hope this works!!! If the build contains the variable named hudson.scm.multijob.force.build.always with a value of "true", we force the build always. Useful to overwrite the subJob.poll(...).hasChanges() value. When to build VALUES hasChanges Y Y Y Y N N N N buildOnly Y Y N N Y Y N N buildAlways Y N Y N Y N Y N --------------------------------------------------------------------------- Build Y Y Y Y Y N Y Y hasChanges Y ==> build ==> Y N buildAlways N ==> build ==> N otherwise ==> Y -------------------------- If the job has SCM changes then message = 0 ==> Add to queue If the job has no SCM changes: If !buildOnlyIfSCMChanges message = 1 ==> No buildOnly, add to queue If buildAlways, the message = 2 ==> No SCM changes, but forced to build. If !buildAlways, then message = 3 ==> No SCM changes, not forced to build. Skipped. */ PhaseJobsConfig phaseConfig = phaseSubJobs.get(phaseSubJob); final boolean buildOnlyIfSCMChanges = phaseConfig.isBuildOnlyIfSCMChanges(); final boolean buildAlways = Boolean.valueOf(build.getBuildVariables().get(BUILD_ALWAYS_KEY)); final boolean hasChanges = subJob.poll(listener).hasChanges(); final int message = (hasChanges) ? 0 : (!buildOnlyIfSCMChanges ? 1 : ((buildAlways) ? 
2 : 3) ); listener.getLogger().println(String.format(TRIGGER_MESSAGES[message], subJob.getName())); if (message == 3) { continue; } reportStart(listener, subJob); List<Action> actions = new ArrayList<Action>(); prepareActions(multiJobBuild, subJob, phaseConfig, listener, actions); while (subJob.isInQueue()) { TimeUnit.SECONDS.sleep(subJob.getQuietPeriod()); } Future<AbstractBuild> future = null; if (!phaseConfig.isDisableJob()) { future = subJob.scheduleBuild2(subJob.getQuietPeriod(), new UpstreamCause((Run) multiJobBuild), actions.toArray(new Action[0])); } if (future != null) { subTasks.add(new SubTask(future, phaseConfig)); } else { listener.getLogger().println( String.format("Warning: %s sub job is disabled.", subJob.getName())); } } if (subTasks.size() < 1) return true; ExecutorService executor = Executors .newFixedThreadPool(subTasks.size()); Set<Result> jobResults = new HashSet<Result>(); BlockingQueue<SubTask> queue = new ArrayBlockingQueue<SubTask>( subTasks.size()); for (SubTask subTask : subTasks) { Runnable worker = new SubJobWorker(thisProject, multiJobBuild, listener, subTask, queue); executor.execute(worker); } executor.shutdown(); int resultCounter = 0; while (!executor.isTerminated()) { SubTask subTask = queue.take(); resultCounter++; if (subTask.result != null) { jobResults.add(subTask.result); checkPhaseTermination(subTask, subTasks); } if (subTasks.size() == resultCounter) break; } executor.shutdownNow(); for (Result result : jobResults) { if (!continuationCondition.isContinue(result)) { return false; } } return true; } private final class SubJobWorker extends Thread { MultiJobProject multiJobProject; MultiJobBuild multiJobBuild; BuildListener listener; SubTask subTask; BlockingQueue<SubTask> queue; SubJobWorker(MultiJobProject multiJobProject, MultiJobBuild multiJobBuild, BuildListener listener, SubTask subTask, BlockingQueue<SubTask> queue) { this.multiJobBuild = multiJobBuild; this.multiJobProject = multiJobProject; this.listener = listener; 
this.subTask = subTask; this.queue = queue; } public void run() { Result result = null; AbstractBuild jobBuild = null; try { QueueTaskFuture<AbstractBuild> future = (QueueTaskFuture<AbstractBuild>) subTask.future; while (true) { if (future.isCancelled() && jobBuild == null) { break; } try { jobBuild = future.getStartCondition().get(5, TimeUnit.SECONDS); updateSubBuild(multiJobBuild, multiJobProject, jobBuild); } catch (Exception e) { if (e instanceof TimeoutException) continue; else { throw e; } } if (future.isDone()) break; try { Thread.sleep(1000); } catch (InterruptedException e) { future.cancel(true); throw new InterruptedException(); } } if (jobBuild != null) { result = jobBuild.getResult(); updateSubBuild(multiJobBuild, multiJobProject, jobBuild, result); ChangeLogSet<Entry> changeLogSet = jobBuild.getChangeSet(); multiJobBuild.addChangeLogSet(changeLogSet); reportFinish(listener, jobBuild, result); addBuildEnvironmentVariables(multiJobBuild, jobBuild, listener); subTask.result = result; } } catch (Exception e) { e.printStackTrace(); } if (jobBuild == null) { updateSubBuild(multiJobBuild, multiJobProject, subTask.phaseConfig); } queue.add(subTask); } } boolean checkPhaseTermination(SubTask subTask, List<SubTask> subTasks) { try { KillPhaseOnJobResultCondition killCondition = subTask.phaseConfig .getKillPhaseOnJobResultCondition(); if (killCondition.equals(KillPhaseOnJobResultCondition.NEVER)) return false; if (killCondition.isKillPhase(subTask.result)) { for (SubTask _subTask : subTasks) _subTask.future.cancel(true); } return true; } catch (Exception e) { return false; } } private static final class SubTask { Future<AbstractBuild> future; PhaseJobsConfig phaseConfig; Result result; SubTask(Future<AbstractBuild> future, PhaseJobsConfig phaseConfig) { this.future = future; this.phaseConfig = phaseConfig; } } private void reportStart(BuildListener listener, AbstractProject subJob) { listener.getLogger().printf( "Starting build job %s.\n", 
HyperlinkNote.encodeTo('/' + subJob.getUrl(), subJob.getFullName())); } private void reportFinish(BuildListener listener, AbstractBuild jobBuild, Result result) { listener.getLogger().println( "Finished Build : " + HyperlinkNote.encodeTo("/" + jobBuild.getUrl() + "/", String.valueOf(jobBuild.getDisplayName())) + " of Job : " + HyperlinkNote.encodeTo('/' + jobBuild.getProject() .getUrl(), jobBuild.getProject().getFullName()) + " with status :" + HyperlinkNote.encodeTo('/' + jobBuild.getUrl() + "/console", result.toString())); } private void updateSubBuild(MultiJobBuild multiJobBuild, MultiJobProject multiJobProject, PhaseJobsConfig phaseConfig) { SubBuild subBuild = new SubBuild(multiJobProject.getName(), multiJobBuild.getNumber(), phaseConfig.getJobName(), 0, phaseName, null, BallColor.NOTBUILT.getImage(), "not built", ""); multiJobBuild.addSubBuild(subBuild); } private void updateSubBuild(MultiJobBuild multiJobBuild, MultiJobProject multiJobProject, AbstractBuild jobBuild) { SubBuild subBuild = new SubBuild(multiJobProject.getName(), multiJobBuild.getNumber(), jobBuild.getProject().getName(), jobBuild.getNumber(), phaseName, null, jobBuild.getIconColor() .getImage(), jobBuild.getDurationString(), jobBuild.getUrl()); multiJobBuild.addSubBuild(subBuild); } private void updateSubBuild(MultiJobBuild multiJobBuild, MultiJobProject multiJobProject, AbstractBuild jobBuild, Result result) { SubBuild subBuild = new SubBuild(multiJobProject.getName(), multiJobBuild.getNumber(), jobBuild.getProject().getName(), jobBuild.getNumber(), phaseName, result, jobBuild .getIconColor().getImage(), jobBuild.getDurationString(), jobBuild.getUrl()); multiJobBuild.addSubBuild(subBuild); } @SuppressWarnings("rawtypes") private void addBuildEnvironmentVariables(MultiJobBuild thisBuild, AbstractBuild jobBuild, BuildListener listener) { // Env variables map Map<String, String> variables = new HashMap<String, String>(); String jobName = jobBuild.getProject().getName(); String jobNameSafe = 
jobName.replaceAll("[^A-Za-z0-9]", "_") .toUpperCase(); String buildNumber = Integer.toString(jobBuild.getNumber()); String buildResult = jobBuild.getResult().toString(); // These will always reference the last build variables.put("LAST_TRIGGERED_JOB_NAME", jobName); variables.put(jobNameSafe + "_BUILD_NUMBER", buildNumber); variables.put(jobNameSafe + "_BUILD_RESULT", buildResult); if (variables.get("TRIGGERED_JOB_NAMES") == null) { variables.put("TRIGGERED_JOB_NAMES", jobName); } else { String triggeredJobNames = variables.get("TRIGGERED_JOB_NAMES") + "," + jobName; variables.put("TRIGGERED_JOB_NAMES", triggeredJobNames); } if (variables.get("TRIGGERED_BUILD_RUN_COUNT_" + jobNameSafe) == null) { variables.put("TRIGGERED_BUILD_RUN_COUNT_" + jobNameSafe, "1"); } else { String runCount = Integer.toString(Integer.parseInt(variables .get("TRIGGERED_BUILD_RUN_COUNT_" + jobNameSafe)) + 1); variables.put("TRIGGERED_BUILD_RUN_COUNT_" + jobNameSafe, runCount); } // Set the new build variables map injectEnvVars(thisBuild, listener, variables); } /** * Method for properly injecting environment variables via EnvInject plugin. 
* Injects the given variables into the running build's environment so that
* subsequent build steps can observe them.
*
* Method based off logic in {@link EnvInjectBuilder#perform}
*/
private void injectEnvVars(AbstractBuild<?, ?> build, BuildListener listener,
        Map<String, String> incomingVars) {
    EnvInjectLogger logger = new EnvInjectLogger(listener);
    FilePath ws = build.getWorkspace();
    EnvInjectActionSetter envInjectActionSetter = new EnvInjectActionSetter(ws);
    EnvInjectEnvVars envInjectEnvVarsService = new EnvInjectEnvVars(logger);

    try {
        EnvInjectVariableGetter variableGetter = new EnvInjectVariableGetter();
        Map<String, String> previousEnvVars = variableGetter
                .getEnvVarsPreviousSteps(build, logger);

        // Get current envVars
        Map<String, String> variables = new HashMap<String, String>(
                previousEnvVars);

        // Resolve variables
        final Map<String, String> resultVariables = envInjectEnvVarsService
                .getMergedVariables(variables, incomingVars);

        // Set the new build variables map
        build.addAction(new EnvInjectBuilderContributionAction(
                resultVariables));

        // Add or get the existing action to add new env vars
        envInjectActionSetter.addEnvVarsToEnvInjectBuildAction(build,
                resultVariables);
    } catch (Throwable throwable) {
        // NOTE(review): deliberately broad catch — a failure to inject env
        // vars is logged to the build console but never aborts the build.
        listener.getLogger()
                .println(
                        "[MultiJob] - [ERROR] - Problems occurs on injecting env vars as a build step: "
                                + throwable.getMessage());
    }
}

/**
 * Collects the actions (apparently parameter contributions — see
 * {@code isCurrParams}; confirm against PhaseJobsConfig#getActions) that a
 * phase sub-job must be launched with and appends them to {@code actions}.
 *
 * @param build         the running MultiJob build
 * @param project       the sub-job about to be launched
 * @param projectConfig this sub-job's configuration inside the phase
 * @param listener      listener used for logging while resolving actions
 * @param actions       output list; computed actions are appended to it
 */
@SuppressWarnings("rawtypes")
private void prepareActions(AbstractBuild build, AbstractProject project,
        PhaseJobsConfig projectConfig, BuildListener listener,
        List<Action> actions) throws IOException, InterruptedException {
    List<Action> parametersActions = null;
    // NOTE(review): the hasProperties() guard below was disabled at some
    // point, so actions are now collected unconditionally.
    // if (projectConfig.hasProperties()) {
    parametersActions = (List<Action>) projectConfig.getActions(build,
            listener, project, projectConfig.isCurrParams());
    actions.addAll(parametersActions);
    // }
}

/** @return the display name of this phase. */
public String getPhaseName() {
    return phaseName;
}

public void setPhaseName(String phaseName) {
    this.phaseName = phaseName;
}

/** @return the sub-job configurations that make up this phase. */
public List<PhaseJobsConfig> getPhaseJobs() {
    return phaseJobs;
}

public void setPhaseJobs(List<PhaseJobsConfig> phaseJobs) {
    this.phaseJobs = phaseJobs;
}

/**
 * Returns {@code true} if a configured phase job's display name equals the
 * given name (exact, case-sensitive match).
 */
public boolean phaseNameExist(String phaseName) {
    for (PhaseJobsConfig phaseJob : phaseJobs) {
        if (phaseJob.getDisplayName().equals(phaseName)) {
            return true;
        }
    }
    return false;
}

/** Simple immutable-by-use holder for a sub-project run by this phase. */
private final static class PhaseSubJob {
    AbstractProject job;

    PhaseSubJob(AbstractProject job) {
        this.job = job;
    }
}

/** Descriptor registering this builder; offered on MultiJob projects only. */
@Extension
public static class DescriptorImpl extends BuildStepDescriptor<Builder> {
    @SuppressWarnings("rawtypes")
    @Override
    public boolean isApplicable(Class<? extends AbstractProject> jobType) {
        // Only MultiJob projects may contain a MultiJob phase build step.
        return jobType.equals(MultiJobProject.class);
    }

    @Override
    public String getDisplayName() {
        return "MultiJob Phase";
    }

    @Override
    public Builder newInstance(StaplerRequest req, JSONObject formData)
            throws FormException {
        return req.bindJSON(MultiJobBuilder.class, formData);
    }

    @Override
    public boolean configure(StaplerRequest req, JSONObject formData) {
        save();
        return true;
    }
}

/**
 * Registers each configured phase job as a downstream dependency of the
 * owner project. The dependency never triggers builds by itself (see
 * {@code shouldTriggerBuild} below); phases start their sub-jobs
 * explicitly.
 */
@SuppressWarnings("rawtypes")
public void buildDependencyGraph(AbstractProject owner, DependencyGraph graph) {
    Jenkins jenkins = Jenkins.getInstance();
    List<PhaseJobsConfig> phaseJobsConfigs = getPhaseJobs();
    if (phaseJobsConfigs == null)
        return;
    for (PhaseJobsConfig project : phaseJobsConfigs) {
        Item topLevelItem = jenkins.getItemByFullName(project.getJobName());
        if (topLevelItem instanceof AbstractProject) {
            Dependency dependency = new Dependency(owner,
                    (AbstractProject) topLevelItem) {

                @Override
                public boolean shouldTriggerBuild(AbstractBuild build,
                        TaskListener listener, List<Action> actions) {
                    // Edge is informational only; never auto-trigger.
                    return false;
                }
            };
            graph.addDependency(dependency);
        }
    }
}

/**
 * Updates every phase job entry whose job name (trimmed) equals
 * {@code oldName}: renames it when {@code newName} is non-null, removes it
 * when {@code newName} is null.
 *
 * @return {@code true} if at least one entry was renamed or removed
 */
public boolean onJobRenamed(String oldName, String newName) {
    boolean changed = false;
    for (Iterator i = phaseJobs.iterator(); i.hasNext();) {
        // NOTE(review): local variable shadows the phaseJobs field.
        PhaseJobsConfig phaseJobs = (PhaseJobsConfig) i.next();
        String jobName = phaseJobs.getJobName();
        if (jobName.trim().equals(oldName)) {
            if (newName != null) {
                phaseJobs.setJobName(newName);
                changed = true;
            } else {
                i.remove();
                changed = true;
            }
        }
    }
    return changed;
}

/** Removes every phase job entry referencing the deleted job name. */
public boolean onJobDeleted(String oldName) {
    return onJobRenamed(oldName, null);
}

/**
 * Policy deciding whether the phase pipeline continues after a sub-job
 * completed with a given {@link Result}.
 */
public static enum ContinuationCondition {

    SUCCESSFUL("Successful") {
        @Override
        public boolean isContinue(Result result) {
            return result.equals(Result.SUCCESS);
        }
    },
    UNSTABLE("Stable or Unstable but not Failed") {
        @Override
        public boolean isContinue(Result result) {
            return result.isBetterOrEqualTo(Result.UNSTABLE);
        }
    },
    COMPLETED("Complete (always continue)") {
        @Override
        public boolean isContinue(Result result) {
            // ABORTED is special-cased to continue; presumably it would
            // not pass isBetterOrEqualTo(FAILURE) on its own — confirm
            // against hudson.model.Result ordering.
            return result.equals(Result.ABORTED) ? true : result
                    .isBetterOrEqualTo(Result.FAILURE);
        }
    },
    FAILURE("Failed") {
        @Override
        public boolean isContinue(Result result) {
            return result.equals(Result.ABORTED)
                    || result.isBetterOrEqualTo(Result.FAILURE);
        }
    };

    /** @return whether the phase should continue after {@code result}. */
    abstract public boolean isContinue(Result result);

    private ContinuationCondition(String label) {
        this.label = label;
    }

    // Human-readable label shown in the job configuration UI.
    final private String label;

    public String getLabel() {
        return label;
    }
}

public ContinuationCondition getContinuationCondition() {
    return continuationCondition;
}

public void setContinuationCondition(
        ContinuationCondition continuationCondition) {
    this.continuationCondition = continuationCondition;
}
}
Fixed the code that caused the test failure in ConditionalPhaseTest
src/main/java/com/tikal/jenkins/plugins/multijob/MultiJobBuilder.java
Fixed the code that caused the test failure in ConditionalPhaseTest
<ide><path>rc/main/java/com/tikal/jenkins/plugins/multijob/MultiJobBuilder.java <ide> import hudson.model.queue.QueueTaskFuture; <ide> import hudson.scm.ChangeLogSet; <ide> import hudson.scm.ChangeLogSet.Entry; <add>import hudson.scm.SCM; <add>import hudson.scm.SCMRevisionState; <ide> import hudson.tasks.BuildStepDescriptor; <ide> import hudson.tasks.Builder; <ide> <ide> -------------------------- <ide> If the job has SCM changes then message = 0 ==> Add to queue <ide> If the job has no SCM changes: <del> If !buildOnlyIfSCMChanges message = 1 ==> No buildOnly, add to queue <del> If buildAlways, the message = 2 ==> No SCM changes, but forced to build. <add> If !buildOnlyIfSCMChanges, then message = 1 ==> No buildOnly, add to queue <add> If buildAlways, then message = 2 ==> No SCM changes, but forced to build. <ide> If !buildAlways, then message = 3 ==> No SCM changes, not forced to build. Skipped. <ide> <ide> */ <del> PhaseJobsConfig phaseConfig = phaseSubJobs.get(phaseSubJob); <add> final SCM scm = subJob.getScm(); <add> final SCMRevisionState scmRS = scm.calcRevisionsFromBuild((AbstractBuild) subJob.getLastBuild(), launcher, listener); <add> final boolean hasChanges = scm.poll(subJob, launcher, subJob.getWorkspace(), listener, scmRS).hasChanges(); <add> <add> final PhaseJobsConfig phaseConfig = phaseSubJobs.get(phaseSubJob); <ide> final boolean buildOnlyIfSCMChanges = phaseConfig.isBuildOnlyIfSCMChanges(); <ide> final boolean buildAlways = Boolean.valueOf(build.getBuildVariables().get(BUILD_ALWAYS_KEY)); <del> final boolean hasChanges = subJob.poll(listener).hasChanges(); <del> <add> <ide> final int message = <ide> (hasChanges) <ide> ? 
0 <ide> } <ide> reportStart(listener, subJob); <ide> List<Action> actions = new ArrayList<Action>(); <del> prepareActions(multiJobBuild, subJob, phaseConfig, listener, <del> actions); <add> prepareActions(multiJobBuild, subJob, phaseConfig, listener, actions); <ide> <ide> while (subJob.isInQueue()) { <ide> TimeUnit.SECONDS.sleep(subJob.getQuietPeriod()); <ide> listener, project, projectConfig.isCurrParams()); <ide> actions.addAll(parametersActions); <ide> // } <del> <ide> } <ide> <ide> public String getPhaseName() {