Dataset schema (column, dtype, value count or length range):

column        dtype          values / lengths
lang          stringclasses  2 values
license       stringclasses  13 values
stderr        stringlengths  0–343
commit        stringlengths  40–40
returncode    int64          0–128
repos         stringlengths  6–87.7k
new_contents  stringlengths  0–6.23M
new_file      stringlengths  3–311
old_contents  stringlengths  0–6.23M
message       stringlengths  6–9.1k
old_file      stringlengths  3–311
subject       stringlengths  0–4k
git_diff      stringlengths  0–6.31M
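The columns describe a per-commit code-change corpus: each row pairs a source file's contents before (old_contents) and after (new_contents) a commit with the commit hash, message, subject, file paths, repository name, language, license, and a marker-style git_diff. Below is a minimal loading sketch using the Hugging Face datasets library; the dataset identifier "user/commit-changes" is a placeholder rather than the real repository id, and streaming is assumed because the content columns can reach several megabytes per row.

```python
# Hypothetical loader sketch; "user/commit-changes" is a placeholder name,
# not the actual dataset id.
from datasets import load_dataset

ds = load_dataset("user/commit-changes", split="train", streaming=True)

for row in ds:
    # Each row pairs a file before/after one commit with its metadata.
    print(row["lang"], row["license"], row["repos"])
    print(row["subject"])                      # one-line commit subject
    print(row["new_file"])                     # path of the changed file
    print(len(row["old_contents"]), "->", len(row["new_contents"]))
    break
```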
Example row 1:
lang: Java
license: apache-2.0
commit: 5d1b558ca36b953f14d6c4b46b137785183462f5
returncode: 0
repos: mtransitapps/ca-niagara-falls-wego-bus-parser,mtransitapps/ca-niagara-falls-wego-bus-parser
new_contents:
package org.mtransit.parser.ca_niagara_falls_wego_bus; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; import org.mtransit.parser.CleanUtils; import org.mtransit.parser.DefaultAgencyTools; import org.mtransit.parser.Pair; import org.mtransit.parser.SplitUtils; import org.mtransit.parser.SplitUtils.RouteTripSpec; import org.mtransit.parser.Utils; import org.mtransit.parser.gtfs.data.GCalendar; import org.mtransit.parser.gtfs.data.GCalendarDate; import org.mtransit.parser.gtfs.data.GRoute; import org.mtransit.parser.gtfs.data.GSpec; import org.mtransit.parser.gtfs.data.GStop; import org.mtransit.parser.gtfs.data.GTrip; import org.mtransit.parser.gtfs.data.GTripStop; import org.mtransit.parser.mt.data.MAgency; import org.mtransit.parser.mt.data.MDirectionType; import org.mtransit.parser.mt.data.MRoute; import org.mtransit.parser.mt.data.MTrip; import org.mtransit.parser.mt.data.MTripStop; // https://niagaraopendata.ca/dataset/niagara-region-transit-gtfs // https://niagaraopendata.ca/dataset/niagara-region-transit-gtfs/resource/cc2fda23-0cab-40b7-b264-1cdb01e08fea // https://maps.niagararegion.ca/googletransit/NiagaraRegionTransit.zip public class NiagaraFallsWEGOBusAgencyTools extends DefaultAgencyTools { public static void main(String[] args) { if (args == null || args.length == 0) { args = new String[3]; args[0] = "input/gtfs.zip"; args[1] = "../../mtransitapps/ca-niagara-falls-wego-bus-android/res/raw/"; args[2] = ""; // files-prefix } new NiagaraFallsWEGOBusAgencyTools().start(args); } private HashSet<String> serviceIds; @Override public void start(String[] args) { System.out.printf("\nGenerating WEGO bus data..."); long start = System.currentTimeMillis(); this.serviceIds = extractUsefulServiceIds(args, this, true); super.start(args); System.out.printf("\nGenerating WEGO bus data... 
DONE in %s.\n", Utils.getPrettyDuration(System.currentTimeMillis() - start)); } @Override public boolean excludingAll() { return this.serviceIds != null && this.serviceIds.isEmpty(); } @Override public boolean excludeCalendar(GCalendar gCalendar) { if (this.serviceIds != null) { return excludeUselessCalendar(gCalendar, this.serviceIds); } return super.excludeCalendar(gCalendar); } @Override public boolean excludeCalendarDate(GCalendarDate gCalendarDates) { if (this.serviceIds != null) { return excludeUselessCalendarDate(gCalendarDates, this.serviceIds); } return super.excludeCalendarDate(gCalendarDates); } @Override public boolean excludeTrip(GTrip gTrip) { if (this.serviceIds != null) { return excludeUselessTrip(gTrip, this.serviceIds); } return super.excludeTrip(gTrip); } @Override public boolean excludeRoute(GRoute gRoute) { if (!gRoute.getAgencyId().contains("Niagara Parks Commission WeGo") // && !gRoute.getAgencyId().contains("Niagara Falls Transit & WeGo")) { return true; } if (!gRoute.getRouteId().contains("WEGO") // && !gRoute.getRouteLongName().contains("WEGO") // && !gRoute.getRouteLongName().equals("604 - Orange - NOTL")) { return true; // excluded } return super.excludeRoute(gRoute); } @Override public Integer getAgencyRouteType() { return MAgency.ROUTE_TYPE_BUS; } @Override public long getRouteId(GRoute gRoute) { String routeId = gRoute.getRouteId(); routeId = STARTS_WITH_WEGO_A00.matcher(routeId).replaceAll(StringUtils.EMPTY); Matcher matcher = DIGITS.matcher(routeId); if (matcher.find()) { return Long.parseLong(matcher.group()); } if (RSN_RED.equalsIgnoreCase(gRoute.getRouteShortName())) { return RID_RED; } else if (RSN_BLUE.equalsIgnoreCase(gRoute.getRouteShortName())) { return RID_BLUE; } else if (RSN_GREEN.equalsIgnoreCase(gRoute.getRouteShortName())) { return RID_GREEN; } else if (RSN_ORANGE.equalsIgnoreCase(gRoute.getRouteShortName())) { return RID_ORANGE; } System.out.printf("\nUnexpected route ID for %s!\n", gRoute); System.exit(-1); return -1l; } @Override public String getRouteShortName(GRoute gRoute) { if (Utils.isDigitsOnly(gRoute.getRouteShortName())) { int rsn = Integer.parseInt(gRoute.getRouteShortName()); switch (rsn) { // @formatter:off case 300: return RSN_RED; // Red case 601: return RSN_RED; // Red case (int) RID_BLUE: return RSN_BLUE; // Blue case (int) RID_GREEN: return RSN_GREEN; // Green case (int) RID_ORANGE: return RSN_ORANGE; // Orange // @formatter:on default: System.out.printf("\nUnexpected route short name for %s!\n", gRoute); System.exit(-1); return null; } } return super.getRouteShortName(gRoute); // used by real-time API } private static final String FALLSVIEW_CLIFTON_HL = "Fallsview / Clifton Hl"; private static final String LUNDY_S_LN = "Lundy's Ln"; private static final String NIAGARA_PKS = "Niagara Pks"; private static final String NOTL_SHUTTLE = "NOTL Shuttle"; private static final String RSN_BLUE = "BLUE"; private static final String RSN_GREEN = "GREEN"; private static final String RSN_ORANGE = "ORANGE"; private static final String RSN_PRPLE = "Prple"; private static final String RSN_PURPLE = "Purple"; private static final String RSN_RED = "RED"; private static final long RID_RED = 601L; private static final long RID_BLUE = 602L; private static final long RID_GREEN = 603L; private static final long RID_ORANGE = 604L; @Override public String getRouteLongName(GRoute gRoute) { if (Utils.isDigitsOnly(gRoute.getRouteShortName())) { int rsn = Integer.parseInt(gRoute.getRouteShortName()); switch (rsn) { // @formatter:off case 601: return 
LUNDY_S_LN; // Red case (int) RID_BLUE: return FALLSVIEW_CLIFTON_HL; // Blue case (int) RID_GREEN: return NIAGARA_PKS; // Green case (int) RID_ORANGE: return NOTL_SHUTTLE; // Orange // @formatter:on default: System.out.printf("\nUnexpected route long name for %s!\n", gRoute); System.exit(-1); return null; } } if (RSN_BLUE.equalsIgnoreCase(gRoute.getRouteShortName())) { return FALLSVIEW_CLIFTON_HL; } else if (RSN_GREEN.equalsIgnoreCase(gRoute.getRouteShortName())) { return NIAGARA_PKS; } else if (RSN_ORANGE.equalsIgnoreCase(gRoute.getRouteShortName())) { return NOTL_SHUTTLE; } else if (RSN_RED.equalsIgnoreCase(gRoute.getRouteShortName())) { return LUNDY_S_LN; } System.out.printf("\nUnexpected route long name for %s!\n", gRoute); System.exit(-1); return null; } private static final String AGENCY_COLOR_ORANGE = "F3632A"; // ORANGE (from PDF) private static final String AGENCY_COLOR = AGENCY_COLOR_ORANGE; @Override public String getAgencyColor() { return AGENCY_COLOR; } private static final String COLOR_5484CC = "5484CC"; private static final String COLOR_45BA67 = "45BA67"; private static final String COLOR_7040A4 = "7040A4"; private static final String COLOR_EE1E23 = "EE1E23"; @Override public String getRouteColor(GRoute gRoute) { if (Utils.isDigitsOnly(gRoute.getRouteShortName())) { int rsn = Integer.parseInt(gRoute.getRouteShortName()); switch (rsn) { // @formatter:off case (int) RID_RED: return COLOR_EE1E23; // Red case (int) RID_BLUE: return COLOR_5484CC; // Blue case (int) RID_GREEN: return COLOR_45BA67; // Green case (int) RID_ORANGE: return null; // same as agency // Orange // @formatter:on default: System.out.printf("\nUnexpected route color for %s!\n", gRoute); System.exit(-1); return null; } } if (RSN_BLUE.equalsIgnoreCase(gRoute.getRouteShortName())) { return COLOR_5484CC; } else if (RSN_GREEN.equalsIgnoreCase(gRoute.getRouteShortName())) { return COLOR_45BA67; } else if (RSN_ORANGE.equalsIgnoreCase(gRoute.getRouteShortName())) { return null; // same as agency } else if (RSN_PURPLE.equalsIgnoreCase(gRoute.getRouteShortName()) || RSN_PRPLE.equalsIgnoreCase(gRoute.getRouteShortName())) { return COLOR_7040A4; } else if (RSN_RED.equalsIgnoreCase(gRoute.getRouteShortName())) { return COLOR_EE1E23; } System.out.printf("\nUnexpected route color for %s!\n", gRoute); System.exit(-1); return null; } private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS2; static { HashMap<Long, RouteTripSpec> map2 = new HashMap<Long, RouteTripSpec>(); map2.put(RID_GREEN, new RouteTripSpec(RID_GREEN, // Green MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.getId(), // Queenston MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.getId()) // Rapidsview .addTripSort(MDirectionType.NORTH.intValue(), // Arrays.asList(new String[] { // "13131", // RAPIDSVIEW" "8006", // != "13031", // <> AERO CAR NORTH "14868", // != AERIAL ADVENTURE "13038", // != BUTFLY Turnaround => "8020", // != Niagara Glen "13070", // != FLORAL CLOCK => })) // .addTripSort(MDirectionType.SOUTH.intValue(), // Arrays.asList(new String[] { // "13070", // FLORAL CLOCK <= "13040", // BUTTERFLY CONS "13038", // BUTFLY Turnaround <= "8033", // Whirlpool Golf S "13032", // AERO CAR SOUTH "13031", // != <> AERO CAR NORTH <= "8035", // == Souvenir City "13131", // RAPIDSVIEW })) // .compileBothTripSort()); ALL_ROUTE_TRIPS2 = map2; } @Override public String cleanStopOriginalId(String gStopId) { gStopId = PRE_STOP_ID.matcher(gStopId).replaceAll(StringUtils.EMPTY); return gStopId; 
} @Override public int compareEarly(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) { if (ALL_ROUTE_TRIPS2.containsKey(routeId)) { return ALL_ROUTE_TRIPS2.get(routeId).compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop, this); } return super.compareEarly(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop); } @Override public ArrayList<MTrip> splitTrip(MRoute mRoute, GTrip gTrip, GSpec gtfs) { if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) { return ALL_ROUTE_TRIPS2.get(mRoute.getId()).getAllTrips(); } return super.splitTrip(mRoute, gTrip, gtfs); } @Override public Pair<Long[], Integer[]> splitTripStop(MRoute mRoute, GTrip gTrip, GTripStop gTripStop, ArrayList<MTrip> splitTrips, GSpec routeGTFS) { if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) { return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, ALL_ROUTE_TRIPS2.get(mRoute.getId()), this); } return super.splitTripStop(mRoute, gTrip, gTripStop, splitTrips, routeGTFS); } @Override public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) { if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) { return; // split } mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), gTrip.getDirectionId()); } @Override public boolean mergeHeadsign(MTrip mTrip, MTrip mTripToMerge) { List<String> headsignsValues = Arrays.asList(mTrip.getHeadsignValue(), mTripToMerge.getHeadsignValue()); if (mTrip.getRouteId() == RID_RED) { if (Arrays.asList( // "Garner Rd Expres", // <> "Garner Rd" // ).containsAll(headsignsValues)) { mTrip.setHeadsignString("Garner Rd", mTrip.getHeadsignId()); return true; } } else if (mTrip.getRouteId() == RID_BLUE) { if (Arrays.asList( // "Marineland", // "Stanley Av & Convention Ctr", // "Convention Ctr" // ).containsAll(headsignsValues)) { mTrip.setHeadsignString("Convention Ctr", mTrip.getHeadsignId()); return true; } } System.out.printf("\nUnexpected trips to merge %s & %s!\n", mTrip, mTripToMerge); System.exit(-1); return false; } private static final Pattern STARTS_WITH_TO = Pattern.compile("(^(.* )?to )", Pattern.CASE_INSENSITIVE); @Override public String cleanTripHeadsign(String tripHeadsign) { if (Utils.isUppercaseOnly(tripHeadsign, true, true)) { tripHeadsign = tripHeadsign.toLowerCase(Locale.ENGLISH); } tripHeadsign = STARTS_WITH_TO.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY); tripHeadsign = CleanUtils.cleanNumbers(tripHeadsign); tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign); return CleanUtils.cleanLabel(tripHeadsign); } @Override public String cleanStopName(String gStopName) { if (Utils.isUppercaseOnly(gStopName, true, true)) { gStopName = gStopName.toLowerCase(Locale.ENGLISH); } gStopName = CleanUtils.cleanNumbers(gStopName); gStopName = CleanUtils.cleanStreetTypes(gStopName); return CleanUtils.cleanLabel(gStopName); } private static final String ZERO_0 = "0"; public static final Pattern STARTS_WITH_WEGO_A00 = Pattern.compile("((^){1}((wego||nf)\\_[A-Z]{1}[\\d]{2}(\\_)?(stop)?))", Pattern.CASE_INSENSITIVE); private static final Pattern PRE_STOP_ID = Pattern.compile("((^){1}((wego||nf)\\_[A-Z]{1}[\\d]{2}(\\_)?)(stop|sto)?)", Pattern.CASE_INSENSITIVE); // STOP CODE REQUIRED FOR REAL-TIME API @Override public String getStopCode(GStop gStop) { String stopCode = gStop.getStopCode(); if (stopCode == null || stopCode.length() == 0 || ZERO_0.equals(stopCode)) { stopCode = gStop.getStopId(); } stopCode = PRE_STOP_ID.matcher(stopCode).replaceAll(StringUtils.EMPTY); if 
("TablRock".equals(stopCode)) { return "8871"; } if ("Sta&6039".equalsIgnoreCase(stopCode)) { return StringUtils.EMPTY; } if (StringUtils.isEmpty(stopCode)) { System.out.printf("\nUnexptected stop code for %s!\n", gStop); System.exit(-1); return null; } return stopCode; } private static final Pattern DIGITS = Pattern.compile("[\\d]+"); @Override public int getStopId(GStop gStop) { String stopCode = gStop.getStopCode(); if (stopCode == null || stopCode.length() == 0 || ZERO_0.equals(stopCode)) { stopCode = gStop.getStopId(); } stopCode = PRE_STOP_ID.matcher(stopCode).replaceAll(StringUtils.EMPTY); if ("TablRock".equals(stopCode)) { return 8871; } if (Utils.isDigitsOnly(stopCode)) { return Integer.parseInt(stopCode); // using stop code as stop ID } if ("MAR".equalsIgnoreCase(stopCode)) { return 900_000; } else if ("8CD1".equalsIgnoreCase(stopCode)) { return 900_001; } else if ("SCT1".equalsIgnoreCase(stopCode)) { return 900_002; } else if ("SCT2".equalsIgnoreCase(stopCode)) { return 900_003; } else if ("Sta&6039".equalsIgnoreCase(stopCode)) { return 900_004; } else if ("Sta&6683".equalsIgnoreCase(stopCode)) { return 900_005; } else if ("FV&6455".equalsIgnoreCase(stopCode)) { return 900_006; } else if ("FV&6760".equalsIgnoreCase(stopCode)) { return 900_007; } System.out.printf("\nUnexpected stop ID %s!\n", gStop); System.exit(-1); return -1; } }
new_file: src/org/mtransit/parser/ca_niagara_falls_wego_bus/NiagaraFallsWEGOBusAgencyTools.java
old_contents:
package org.mtransit.parser.ca_niagara_falls_wego_bus; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang3.StringUtils; import org.mtransit.parser.CleanUtils; import org.mtransit.parser.DefaultAgencyTools; import org.mtransit.parser.Pair; import org.mtransit.parser.SplitUtils; import org.mtransit.parser.SplitUtils.RouteTripSpec; import org.mtransit.parser.Utils; import org.mtransit.parser.gtfs.data.GCalendar; import org.mtransit.parser.gtfs.data.GCalendarDate; import org.mtransit.parser.gtfs.data.GRoute; import org.mtransit.parser.gtfs.data.GSpec; import org.mtransit.parser.gtfs.data.GStop; import org.mtransit.parser.gtfs.data.GTrip; import org.mtransit.parser.gtfs.data.GTripStop; import org.mtransit.parser.mt.data.MAgency; import org.mtransit.parser.mt.data.MDirectionType; import org.mtransit.parser.mt.data.MRoute; import org.mtransit.parser.mt.data.MTrip; import org.mtransit.parser.mt.data.MTripStop; // https://niagaraopendata.ca/dataset/niagara-region-transit-gtfs // https://niagaraopendata.ca/dataset/niagara-region-transit-gtfs/resource/cc2fda23-0cab-40b7-b264-1cdb01e08fea // https://maps.niagararegion.ca/googletransit/NiagaraRegionTransit.zip public class NiagaraFallsWEGOBusAgencyTools extends DefaultAgencyTools { public static void main(String[] args) { if (args == null || args.length == 0) { args = new String[3]; args[0] = "input/gtfs.zip"; args[1] = "../../mtransitapps/ca-niagara-falls-wego-bus-android/res/raw/"; args[2] = ""; // files-prefix } new NiagaraFallsWEGOBusAgencyTools().start(args); } private HashSet<String> serviceIds; @Override public void start(String[] args) { System.out.printf("\nGenerating WEGO bus data..."); long start = System.currentTimeMillis(); this.serviceIds = extractUsefulServiceIds(args, this, true); super.start(args); System.out.printf("\nGenerating WEGO bus data... 
DONE in %s.\n", Utils.getPrettyDuration(System.currentTimeMillis() - start)); } @Override public boolean excludingAll() { return this.serviceIds != null && this.serviceIds.isEmpty(); } @Override public boolean excludeCalendar(GCalendar gCalendar) { if (this.serviceIds != null) { return excludeUselessCalendar(gCalendar, this.serviceIds); } return super.excludeCalendar(gCalendar); } @Override public boolean excludeCalendarDate(GCalendarDate gCalendarDates) { if (this.serviceIds != null) { return excludeUselessCalendarDate(gCalendarDates, this.serviceIds); } return super.excludeCalendarDate(gCalendarDates); } @Override public boolean excludeTrip(GTrip gTrip) { if (this.serviceIds != null) { return excludeUselessTrip(gTrip, this.serviceIds); } return super.excludeTrip(gTrip); } @Override public boolean excludeRoute(GRoute gRoute) { if (!gRoute.getAgencyId().contains("Niagara Falls Transit & WeGo")) { return true; } if (!gRoute.getRouteLongName().startsWith("WEGO")) { return true; // excluded } return super.excludeRoute(gRoute); } @Override public Integer getAgencyRouteType() { return MAgency.ROUTE_TYPE_BUS; } @Override public long getRouteId(GRoute gRoute) { String routeId = gRoute.getRouteId(); routeId = STARTS_WITH_WEGO_A00.matcher(routeId).replaceAll(StringUtils.EMPTY); Matcher matcher = DIGITS.matcher(routeId); if (matcher.find()) { return Long.parseLong(matcher.group()); } if (RSN_RED.equalsIgnoreCase(gRoute.getRouteShortName())) { return RID_RED; } else if (RSN_BLUE.equalsIgnoreCase(gRoute.getRouteShortName())) { return RID_BLUE; } else if (RSN_GREEN.equalsIgnoreCase(gRoute.getRouteShortName())) { return RID_GREEN; } else if (RSN_ORANGE.equalsIgnoreCase(gRoute.getRouteShortName())) { return RID_ORANGE; } System.out.printf("\nUnexpected route ID for %s!\n", gRoute); System.exit(-1); return -1l; } @Override public String getRouteShortName(GRoute gRoute) { if (Utils.isDigitsOnly(gRoute.getRouteShortName())) { int rsn = Integer.parseInt(gRoute.getRouteShortName()); switch (rsn) { // @formatter:off case 300: return RSN_RED; // Red case 601: return RSN_RED; // Red case (int) RID_BLUE: return RSN_BLUE; // Blue case (int) RID_GREEN: return RSN_GREEN; // Green case (int) RID_ORANGE: return RSN_ORANGE; // Orange // @formatter:on default: System.out.printf("\nUnexpected route short name for %s!\n", gRoute); System.exit(-1); return null; } } return super.getRouteShortName(gRoute); // used by real-time API } private static final String FALLSVIEW_CLIFTON_HL = "Fallsview / Clifton Hl"; private static final String LUNDY_S_LN = "Lundy's Ln"; private static final String NIAGARA_PKS = "Niagara Pks"; private static final String NOTL_SHUTTLE = "NOTL Shuttle"; private static final String RSN_BLUE = "BLUE"; private static final String RSN_GREEN = "GREEN"; private static final String RSN_ORANGE = "ORANGE"; private static final String RSN_PRPLE = "Prple"; private static final String RSN_PURPLE = "Purple"; private static final String RSN_RED = "RED"; private static final long RID_RED = 601L; private static final long RID_BLUE = 602L; private static final long RID_GREEN = 603L; private static final long RID_ORANGE = 604L; @Override public String getRouteLongName(GRoute gRoute) { if (Utils.isDigitsOnly(gRoute.getRouteShortName())) { int rsn = Integer.parseInt(gRoute.getRouteShortName()); switch (rsn) { // @formatter:off case 601: return LUNDY_S_LN; // Red case (int) RID_BLUE: return FALLSVIEW_CLIFTON_HL; // Blue case (int) RID_GREEN: return NIAGARA_PKS; // Green case (int) RID_ORANGE: return NOTL_SHUTTLE; // Orange 
// @formatter:on default: System.out.printf("\nUnexpected route long name for %s!\n", gRoute); System.exit(-1); return null; } } if (RSN_BLUE.equalsIgnoreCase(gRoute.getRouteShortName())) { return FALLSVIEW_CLIFTON_HL; } else if (RSN_GREEN.equalsIgnoreCase(gRoute.getRouteShortName())) { return NIAGARA_PKS; } else if (RSN_ORANGE.equalsIgnoreCase(gRoute.getRouteShortName())) { return NOTL_SHUTTLE; } else if (RSN_RED.equalsIgnoreCase(gRoute.getRouteShortName())) { return LUNDY_S_LN; } System.out.printf("\nUnexpected route long name for %s!\n", gRoute); System.exit(-1); return null; } private static final String AGENCY_COLOR_ORANGE = "F3632A"; // ORANGE (from PDF) private static final String AGENCY_COLOR = AGENCY_COLOR_ORANGE; @Override public String getAgencyColor() { return AGENCY_COLOR; } private static final String COLOR_5484CC = "5484CC"; private static final String COLOR_45BA67 = "45BA67"; private static final String COLOR_7040A4 = "7040A4"; private static final String COLOR_EE1E23 = "EE1E23"; @Override public String getRouteColor(GRoute gRoute) { if (Utils.isDigitsOnly(gRoute.getRouteShortName())) { int rsn = Integer.parseInt(gRoute.getRouteShortName()); switch (rsn) { // @formatter:off case (int) RID_RED: return COLOR_EE1E23; // Red case (int) RID_BLUE: return COLOR_5484CC; // Blue case (int) RID_GREEN: return COLOR_45BA67; // Green case (int) RID_ORANGE: return null; // same as agency // Orange // @formatter:on default: System.out.printf("\nUnexpected route color for %s!\n", gRoute); System.exit(-1); return null; } } if (RSN_BLUE.equalsIgnoreCase(gRoute.getRouteShortName())) { return COLOR_5484CC; } else if (RSN_GREEN.equalsIgnoreCase(gRoute.getRouteShortName())) { return COLOR_45BA67; } else if (RSN_ORANGE.equalsIgnoreCase(gRoute.getRouteShortName())) { return null; // same as agency } else if (RSN_PURPLE.equalsIgnoreCase(gRoute.getRouteShortName()) || RSN_PRPLE.equalsIgnoreCase(gRoute.getRouteShortName())) { return COLOR_7040A4; } else if (RSN_RED.equalsIgnoreCase(gRoute.getRouteShortName())) { return COLOR_EE1E23; } System.out.printf("\nUnexpected route color for %s!\n", gRoute); System.exit(-1); return null; } private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS2; static { HashMap<Long, RouteTripSpec> map2 = new HashMap<Long, RouteTripSpec>(); map2.put(RID_GREEN, new RouteTripSpec(RID_GREEN, // Green MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.getId(), // Queenston MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.getId()) // Rapidsview .addTripSort(MDirectionType.NORTH.intValue(), // Arrays.asList(new String[] { // "13131", // RAPIDSVIEW" "8006", // != "13031", // <> AERO CAR NORTH "14868", // != AERIAL ADVENTURE "13038", // != BUTFLY Turnaround => "8020", // != Niagara Glen "13070", // != FLORAL CLOCK => })) // .addTripSort(MDirectionType.SOUTH.intValue(), // Arrays.asList(new String[] { // "13070", // FLORAL CLOCK <= "13040", // BUTTERFLY CONS "13038", // BUTFLY Turnaround <= "8033", // Whirlpool Golf S "13032", // AERO CAR SOUTH "13031", // != <> AERO CAR NORTH <= "8035", // == Souvenir City "13131", // RAPIDSVIEW })) // .compileBothTripSort()); ALL_ROUTE_TRIPS2 = map2; } @Override public String cleanStopOriginalId(String gStopId) { gStopId = PRE_STOP_ID.matcher(gStopId).replaceAll(StringUtils.EMPTY); return gStopId; } @Override public int compareEarly(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) { if 
(ALL_ROUTE_TRIPS2.containsKey(routeId)) { return ALL_ROUTE_TRIPS2.get(routeId).compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop, this); } return super.compareEarly(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop); } @Override public ArrayList<MTrip> splitTrip(MRoute mRoute, GTrip gTrip, GSpec gtfs) { if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) { return ALL_ROUTE_TRIPS2.get(mRoute.getId()).getAllTrips(); } return super.splitTrip(mRoute, gTrip, gtfs); } @Override public Pair<Long[], Integer[]> splitTripStop(MRoute mRoute, GTrip gTrip, GTripStop gTripStop, ArrayList<MTrip> splitTrips, GSpec routeGTFS) { if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) { return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, ALL_ROUTE_TRIPS2.get(mRoute.getId()), this); } return super.splitTripStop(mRoute, gTrip, gTripStop, splitTrips, routeGTFS); } @Override public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) { if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) { return; // split } mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), gTrip.getDirectionId()); } @Override public boolean mergeHeadsign(MTrip mTrip, MTrip mTripToMerge) { List<String> headsignsValues = Arrays.asList(mTrip.getHeadsignValue(), mTripToMerge.getHeadsignValue()); if (mTrip.getRouteId() == RID_RED) { if (Arrays.asList( // "Garner Rd Expres", // <> "Garner Rd" // ).containsAll(headsignsValues)) { mTrip.setHeadsignString("Garner Rd", mTrip.getHeadsignId()); return true; } } else if (mTrip.getRouteId() == RID_BLUE) { if (Arrays.asList( // "Marineland", // "Convention Ctr" // ).containsAll(headsignsValues)) { mTrip.setHeadsignString("Convention Ctr", mTrip.getHeadsignId()); return true; } } System.out.printf("\nUnexpected trips to merge %s & %s!\n", mTrip, mTripToMerge); System.exit(-1); return false; } private static final Pattern STARTS_WITH_TO = Pattern.compile("(^(.* )?to )", Pattern.CASE_INSENSITIVE); @Override public String cleanTripHeadsign(String tripHeadsign) { if (Utils.isUppercaseOnly(tripHeadsign, true, true)) { tripHeadsign = tripHeadsign.toLowerCase(Locale.ENGLISH); } tripHeadsign = STARTS_WITH_TO.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY); tripHeadsign = CleanUtils.cleanNumbers(tripHeadsign); tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign); return CleanUtils.cleanLabel(tripHeadsign); } @Override public String cleanStopName(String gStopName) { if (Utils.isUppercaseOnly(gStopName, true, true)) { gStopName = gStopName.toLowerCase(Locale.ENGLISH); } gStopName = CleanUtils.cleanNumbers(gStopName); gStopName = CleanUtils.cleanStreetTypes(gStopName); return CleanUtils.cleanLabel(gStopName); } private static final String ZERO_0 = "0"; public static final Pattern STARTS_WITH_WEGO_A00 = Pattern.compile("((^){1}((wego||nf)\\_[A-Z]{1}[\\d]{2}(\\_)?(stop)?))", Pattern.CASE_INSENSITIVE); private static final Pattern PRE_STOP_ID = Pattern.compile("((^){1}((wego||nf)\\_[A-Z]{1}[\\d]{2}(\\_)?)(stop|sto)?)", Pattern.CASE_INSENSITIVE); // STOP CODE REQUIRED FOR REAL-TIME API @Override public String getStopCode(GStop gStop) { String stopCode = gStop.getStopCode(); if (stopCode == null || stopCode.length() == 0 || ZERO_0.equals(stopCode)) { stopCode = gStop.getStopId(); } stopCode = PRE_STOP_ID.matcher(stopCode).replaceAll(StringUtils.EMPTY); if ("TablRock".equals(stopCode)) { return "8871"; } if ("Sta&6039".equalsIgnoreCase(stopCode)) { return StringUtils.EMPTY; } if (StringUtils.isEmpty(stopCode)) { System.out.printf("\nUnexptected stop code for 
%s!\n", gStop); System.exit(-1); return null; } return stopCode; } private static final Pattern DIGITS = Pattern.compile("[\\d]+"); @Override public int getStopId(GStop gStop) { String stopCode = gStop.getStopCode(); if (stopCode == null || stopCode.length() == 0 || ZERO_0.equals(stopCode)) { stopCode = gStop.getStopId(); } stopCode = PRE_STOP_ID.matcher(stopCode).replaceAll(StringUtils.EMPTY); if ("TablRock".equals(stopCode)) { return 8871; } if (Utils.isDigitsOnly(stopCode)) { return Integer.parseInt(stopCode); // using stop code as stop ID } if ("MAR".equalsIgnoreCase(stopCode)) { return 900_000; } else if ("8CD1".equalsIgnoreCase(stopCode)) { return 900_001; } else if ("SCT1".equalsIgnoreCase(stopCode)) { return 900_002; } else if ("SCT2".equalsIgnoreCase(stopCode)) { return 900_003; } else if ("Sta&6039".equalsIgnoreCase(stopCode)) { return 900_004; } else if ("Sta&6683".equalsIgnoreCase(stopCode)) { return 900_005; } else if ("FV&6455".equalsIgnoreCase(stopCode)) { return 900_006; } else if ("FV&6760".equalsIgnoreCase(stopCode)) { return 900_007; } System.out.printf("\nUnexpected stop ID %s!\n", gStop); System.exit(-1); return -1; } }
message: Compatibility with latest update
old_file: src/org/mtransit/parser/ca_niagara_falls_wego_bus/NiagaraFallsWEGOBusAgencyTools.java
subject: Compatibility with latest update
git_diff:
<ide><path>rc/org/mtransit/parser/ca_niagara_falls_wego_bus/NiagaraFallsWEGOBusAgencyTools.java <ide> <ide> @Override <ide> public boolean excludeRoute(GRoute gRoute) { <del> if (!gRoute.getAgencyId().contains("Niagara Falls Transit & WeGo")) { <add> if (!gRoute.getAgencyId().contains("Niagara Parks Commission WeGo") // <add> && !gRoute.getAgencyId().contains("Niagara Falls Transit & WeGo")) { <ide> return true; <ide> } <del> if (!gRoute.getRouteLongName().startsWith("WEGO")) { <add> if (!gRoute.getRouteId().contains("WEGO") // <add> && !gRoute.getRouteLongName().contains("WEGO") // <add> && !gRoute.getRouteLongName().equals("604 - Orange - NOTL")) { <ide> return true; // excluded <ide> } <ide> return super.excludeRoute(gRoute); <ide> } else if (mTrip.getRouteId() == RID_BLUE) { <ide> if (Arrays.asList( // <ide> "Marineland", // <add> "Stanley Av & Convention Ctr", // <ide> "Convention Ctr" // <ide> ).containsAll(headsignsValues)) { <ide> mTrip.setHeadsignString("Convention Ctr", mTrip.getHeadsignId());
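The git_diff column above does not use standard unified-diff syntax: each diff line is prefixed with <ide> (unchanged context), <del> (removed) or <add> (added), and the leading <ide><path>... entry names the file. The sketch below splits those markers back into added and removed lines; the marker semantics are inferred from this sample row, and the helper name split_markers is made up for illustration.

```python
# Best-effort parser for the marker-style diffs in the git_diff column.
# Marker semantics (<ide>/<del>/<add>) are inferred from the sample row above,
# assuming one marker-prefixed entry per line in the underlying data.
def split_markers(git_diff: str):
    """Split a marker-style diff into (added, removed) line lists."""
    added, removed = [], []
    for line in git_diff.splitlines():
        if line.startswith("<add>"):
            added.append(line[len("<add>"):].lstrip())
        elif line.startswith("<del>"):
            removed.append(line[len("<del>"):].lstrip())
    return added, removed

demo = "<ide> context line\n<del> old line\n<add> new line"
print(split_markers(demo))  # (['new line'], ['old line'])
```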
Example row 2:
lang: Java
license: mit
commit: 08335cdfb05b0089db591bbad4c763d8e50d94de
returncode: 0
repos: devYaoYH/battlecode2017_edoctablet
new_contents:
package Scarif_stable; import battlecode.common.*; import java.util.ArrayList; import java.util.HashSet; public class GardenerPlayer extends BasePlayer { //Gardener specific variables private int treeTimer = 0; private int lumberjackTimer = 0; private int soldierTimer = 0; private int scoutTimer = 0; private int tankTimer = 0; static float FARMER_CLEARANCE_RADIUS = 8f; //How far apart should farmers be static final float TREE_RADIUS = 1f; //Radius of player trees private RobotType[] build_order; private boolean build_queue_empty = true; private boolean has_built_robot = false; private boolean enemyEngaged = false; private boolean CHEESE_SCOUT = true; //Farming stuff private MapLocation SPAWNING_ARCHON_LOCATION = null; private MapLocation LAST_KNOWN_GARDENER_LOCATION = null; private boolean settledDown = false; static HashSet<MapLocation> localBlacklist = new HashSet<>(); static HashSet<MapLocation> globalBlacklist = new HashSet<>(); static int globalBlacklist_index = 0; static final int LOCATION_TIMEOUT = 50; //Timeout to get to a new location static int settleTimer; static final int MAX_READ_SETTLE_PTS = 50; //Maximum number of settle pts read static int MAX_INIT_TREES = 2; //Max number of early game trees static float MIN_SEPARATION_DIST = 23f; //If target is further than nf, start new local farm private ArrayList<Direction> current_available_slots = new ArrayList<>(); //Reads current unit composition static int number_gardeners = 0; static int number_lumberjacks = 0; private int surrounding_lumberjacks = 0; static int number_scouts = 0; static int number_soldiers = 0; static int number_tanks = 0; //Spawning constants (Macro control) private Direction SPAWN_DIR = null; //Reserved spawning slot once in position private float BULLET_OVERFLOW = 347; //Overflowing bullets, build something! static final int SOLDIER_DELAY = 17; //Number of turns to spawn a new soldier private float SPAWN_SOLDIER_CHANCE = 0.75f; private float SOLDIER_TO_TREE_RATIO = 1.23f; //Max number of soldiers per tree static int MIN_SOLDIER = 5; //Minimum number of soldiers static int MAX_SOLDIER = 10; //Early game max soldiers static final int SCOUT_DELAY = 100; //Number of turns to spawn a new scout private float SPAWN_SCOUT_CHANCE = 0.0f; private float SCOUT_TO_TREE_RATIO = 0.25f; //Max number of scouts per tree static final int TANK_DELAY = 13; //Number of turns to spawn a new tank private float SPAWN_TANK_CHANCE = 1f; private float TANK_TO_TREE_RATIO = 0.25f; //Max number of tanks per tree private float TANK_DENSITY_THRESHOLD = 0.15f; //Surrounding density < this before tanks can be spawned private float LUMBERJACK_TO_NEUTRAL_RATIO = 0.43f; //Number of lumbers per tree surrounding gardener private float LUMBERJACK_TO_SOLDIER_RATIO = 2f; //Number of lumbers per soldier private final int LUMBERJACK_DELAY = 30; //Number of turns to spawn a new lumberjack private float NEUTRAL_TREE_HEALTH_THRESHOLD = 750f; //Total accumulative health of surrounding trees | above which will spawn lumberjacks //Movement stuff static MapLocation prevLocation = null; //Previous location static int staying_conut = 0; //How many turns spent around there static int stuckRounds = 11; //How many rounds before I'm stuck static int cooldownRounds = 17; static int cooldownTimer = 0; //Turns to run slug path before reverting to direct pathing static float movement_stuck_radius = RobotType.GARDENER.strideRadius/8*stuckRounds; //Stuck distance for 17 rounds static boolean am_stuck = false; //I am stuck where I am... 
static boolean cooldown_active = false; //Cooldown period where direct pathing is disabled static boolean search_dir = true; //Right/Left bot, true == Right bot private int timeToTarget = 0; private int targetTimer = THE_FINAL_PROBLEM; private int SLUG = 3; //How many levels of slugging private Direction cooldownDir = null; //Try to escape local minimum /* GardenerPlayer is one of our most extensive pieces of code (2nd longest after BasePlayer) as it controls the critical macro strategy Basic logic structure of our gardeners: settleDown = can_settle_down(); if (!settleDown){ if (!is_target_valid(target)){ acquire_target(); } else{ move_to_target(); } } else{ broadcast_locations_for_other_gardeners(); reserve_slot_for_spawning(); spam_trees(); //Max 5 } spawnUnits(); Ofc there's a bunch of additional checks and ad-hoc code in there, but that's about it for our gardeners. We experimented with a spade of different farming techniques for gardeners and in the end, settled on individual hexagonal flower design in a bigger hexagonal lattice. It provided the best balance between settling speed, space efficiency as well as gardener-to-tree ratio. Some points of interest in our gardener code: --> Short-range/Long-range min density-sensing movement --> Scans for local minimum density and moves towards a position that evaluates as such --> If stuck, try scanning further to escape local minima --> Build-order queue --> At high-level plays, mastering the initial build order is critical --> Games are frequently decided in the first 100 rounds --> Sensing valid building locations --> Finer sensing than simply a free circle of radius 3 around gardener If you're a participant, probably such considerations have already crossed your mind at some point during the competition. A stable gardener code is the single most critical piece of code in this year's tournament (well, aside from getting that gardener out from the archons). Personally, our team floundered at the Qualifying tournament due to having unstable gardener code >.< they failed to settle down fast enough and thus resulted in a delayed economic buildup. So to those teams out there (and future participants reading this), identify critical pieces of code early and stabilize them before moving on... */ public GardenerPlayer(RobotController rc) throws GameActionException { super(rc); try { initPlayer(); } catch(Exception e){ e.printStackTrace(); } for (; ; Clock.yield()) { try { startRound(); run(); endRound(); } catch(Exception e){ e.printStackTrace(); } } } private void initPlayer() throws GameActionException { MOVEMENT_SCANNING_DISTANCE = 3.1f; MOVEMENT_SCANNING_RADIUS = 2.1f; SURROUNDING_ROBOTS_OWN = rc.senseNearbyRobots(3f, rc.getTeam()); for (RobotInfo r:SURROUNDING_ROBOTS_OWN){ if (r.type == RobotType.ARCHON){ SPAWNING_ARCHON_LOCATION = r.location; break; } } //todo I'm debugging this... debug = false; //Turn this on for visualizations as to how our gardeners work } public void startRound() throws GameActionException { super.startRound(); SURROUNDING_TREES_OWN = rc.senseNearbyTrees(3f, rc.getTeam()); if (settledDown){ rc.broadcast(SUM_SETTLED,rc.readBroadcast(SUM_SETTLED)+1); } else{ rc.broadcast(SUM_GARDENERS,rc.readBroadcast(SUM_GARDENERS)+1); } build_order = getBuildOrder(); build_queue_empty = true; has_built_robot = false; if (SURROUNDING_ROBOTS_ENEMY.length > 0){ contactRep(); } //Reset farmer settling counter - distributing this piece of control code provides redundancy (what if our archon was destroyed?) 
if (rc.readBroadcast(CONTROL_FARMER_HEARTBEAT) <= Math.max(0, rc.getRoundNum() - 3)){ rc.broadcast(CONTROL_FARMER, rc.getID()); rc.broadcast(AVAILABLE_FARMING_LOCATIONS, 0); } if (rc.readBroadcast(CONTROL_FARMER) == rc.getID()) { rc.setIndicatorLine(MY_LOCATION, MY_LOCATION.add(new Direction(0f),2f), 0, 0, 0); rc.broadcast(AVAILABLE_FARMING_LOCATIONS, 0); rc.broadcast(CONTROL_FARMER_HEARTBEAT, rc.getRoundNum()); if (rc.readBroadcast(BLACKLISTED_FARMING_LOCATIONS) > MAX_BLACKLISTED_LOCATIONS){ rc.broadcast(BLACKLISTED_FARMING_LOCATIONS, 0); } } number_gardeners = rc.readBroadcast(COUNT_GARDENERS); number_lumberjacks = rc.readBroadcast(COUNT_LUMBERJACKS); number_scouts = rc.readBroadcast(COUNT_SCOUTS); number_soldiers = rc.readBroadcast(COUNT_SOLDIERS); number_tanks = rc.readBroadcast(COUNT_TANKS); surrounding_lumberjacks = 0; for (RobotInfo r:SURROUNDING_ROBOTS_OWN){ if (r.type == RobotType.LUMBERJACK) surrounding_lumberjacks++; } //todo check for last known gardener position (sticks close together :D) //We were thinking of some sort of flocking behavior for gardeners (not to stray too far away from others) for (RobotInfo r:SURROUNDING_ROBOTS_OWN){ if (r.type == RobotType.GARDENER){ LAST_KNOWN_GARDENER_LOCATION = r.location; break; } } if (LAST_KNOWN_GARDENER_LOCATION != null) if (debug) rc.setIndicatorLine(MY_LOCATION, LAST_KNOWN_GARDENER_LOCATION, 0, 255, 0); //Check whether enemy has been engaged! if (!enemyEngaged) enemyEngaged = (rc.readBroadcast(ENEMY_ENGAGED) == 1); //Read and store global blacklisted locations //Fetch global blacklist only once (and as blacklisted positions are only appended - previous entries immutable, we simply get updates) int blacklist_size = rc.readBroadcast(BLACKLISTED_FARMING_LOCATIONS); System.out.println("Total Blacklisted locations: " + Integer.toString(blacklist_size)); for (int i = globalBlacklist_index; i < blacklist_size; i++){ float locX = intToFloatPrecise(rc.readBroadcast(BLACKLISTED_LOCATIONS_X + i * 2)); float locY = intToFloatPrecise(rc.readBroadcast(BLACKLISTED_LOCATIONS_Y + i * 2)); globalBlacklist.add(new MapLocation(locX, locY)); if (debug) rc.setIndicatorDot(new MapLocation(locX, locY), 255, 255, 0); } globalBlacklist_index = blacklist_size; } private void run() throws GameActionException { //Follow build order for (int i=0;i<MAX_BUILD_ORDER;i++){ if (build_order[i]!=null){ System.out.println("Building: "+build_order[i]); build_queue_empty = false; if (forceBuilding(build_order[i])){ rc.broadcast(BUILD_ORDER_QUEUE+i, 0); } break; } } //Cheese dat scout out if (CHEESE_SCOUT && build_order[0] == null && build_order[1] == null){ if (shouldCheeseScout() || rc.readBroadcast(FORCE_SCOUT_CHEESE) > 80) { //Reads whether that single lone scout has been spawned to collect bullets from trees (thus we cap at 1 scout per game) CHEESE_SCOUT = (rc.readBroadcast(SCOUT_CHEESE_SIGNAL) == 0); if (CHEESE_SCOUT){ if(forceBuilding(RobotType.SCOUT)){ System.out.println("Scout cheeser sent!"); rc.broadcast(SCOUT_CHEESE_SIGNAL, 1); } } } } //Plant trees if (!settledDown){ if (!rc.isCircleOccupiedExceptByThisRobot(MY_LOCATION, 3.01f) && rc.onTheMap(MY_LOCATION, 3f)) current_available_slots = HEXAGONAL_PATTERN; //Gardener starts to farm early //We relax conditions for first farm to start (expedites economic buildup) else if (rc.getTreeCount() == 0) current_available_slots = getBuildSlots(MY_LOCATION, 2); else current_available_slots = getBuildSlots(MY_LOCATION, FARMER_MIN_SLOTS); if (current_available_slots.size() > 0) { //Get most spacious direction - hopefully 
spawning in this direction will not get units stuck float density = 999999f; int spawn_direction = -1; System.out.println("Total build slots: " + Integer.toString(current_available_slots.size())); for (int i = 0; i < current_available_slots.size(); i++) { System.out.println("Available build slot at: " + Float.toString(current_available_slots.get(i).radians)); float eval = getDensity(MY_LOCATION.add(current_available_slots.get(i), MOVEMENT_SCANNING_DISTANCE), MOVEMENT_SCANNING_RADIUS, OBJECTS); if (eval < density) { density = eval; spawn_direction = i; } } if (spawn_direction != -1) { SPAWN_DIR = current_available_slots.get(spawn_direction); //We reserve a direction for spawning units current_available_slots.remove(spawn_direction); } } } else if (current_available_slots.size() < 1){ //Once settled down, hinge build_slots around spawn_direction (assume hextet of slots are free) //Using previously computed SPAWN_DIR to hinge the other 5 slots around reduces bytecode usage as we won't need to //test surroundings for build slots again. if (SPAWN_DIR != null){ current_available_slots.clear(); for (int i=1;i<6;i++){ current_available_slots.add(SPAWN_DIR.rotateRightRads(DEG_60*i)); } } else { //If SPAWN_DIR is not set, search around for free slots assign SPAWN_DIR current_available_slots = getBuildSlots(MY_LOCATION, 2); if (current_available_slots.size() > 0){ SPAWN_DIR = current_available_slots.get(0); current_available_slots.remove(0); } } } //Debug bytecode costs System.out.println("Bytes used to compute build locations: "+Integer.toString(Clock.getBytecodeNum())); //Debug for spawn direction if (SPAWN_DIR != null) System.out.println("SPAWN_DIR: "+Float.toString(SPAWN_DIR.radians)); //Debug settling down System.out.println("Have I settled? "+Boolean.toString(settledDown)); System.out.println("Max slots around me: "+Integer.toString(Math.max(FARMER_MIN_SLOTS-SURROUNDING_TREES_OWN.length,1))); if (!settledDown){ //Try to spawn a lumberjack //Check if there are trees around if (rc.getRoundNum() > lumberjackTimer) { forceBuilding(RobotType.LUMBERJACK); lumberjackTimer = rc.getRoundNum() + LUMBERJACK_DELAY; } //Acquire new target if none is currently set if (target == null || !isSettleLocationValid(target)){ System.out.println("Computed target validity: "+Clock.getBytecodeNum()); target = acquireSettleLocation(); System.out.println("Acquired target: "+Clock.getBytecodeNum()); } //Move to target location if (target == null){ if (debug) rc.setIndicatorDot(MY_LOCATION, 255, 0, 0); gardenerDensityMove(GARDENER_MOVE_GRANULARITY); //Initial movement algo for the first gardener (or if no nearby locations are available) System.out.println("Computed density move direction: "+Clock.getBytecodeNum()); } else{ if (debug) rc.setIndicatorDot(MY_LOCATION, 0, 255, 0); moveToSettleLocation(); //Navigate to nearest available location (on that global hexagonal lattice for tree clusters) System.out.println("Computed move to target: "+Clock.getBytecodeNum()); } //todo don't settledown if entire grove of trees won't be on the map // && rc.onTheMap(MY_LOCATION, 3f) //We decided to relax the conditions as planting some trees is better off than delaying planting trees (huge economic falloff) if (target == null) { if (rc.getTreeCount() == 0) { //If you're the first gardener to start a cluster of trees, ignore spreading out from surrounding gardeners if (current_available_slots.size() > 0 && isSpreadOut(MY_LOCATION, FARMER_CLEARANCE_RADIUS, false)) settledDown = true; } else if (current_available_slots.size() > 0 && 
isSpreadOut(MY_LOCATION, FARMER_CLEARANCE_RADIUS, true)) settledDown = true; } } else{ //Announce locations around itself broadcastAvailableSettleLocations(); //Should I plant trees? boolean should_plant_trees = (rc.getRoundNum() > treeTimer); //Maintain a basic army before planting too many trees if (build_order[0] != null && build_order[1] != null && rc.getRoundNum() < EARLY_GAME){ //Skip building trees till first 2 soldiers are out :) should_plant_trees = false; } if (spawnSoldierControl()){ //If I should spawn a lumberjack or soldier or if I've tons of bullets, go build these units first :) should_plant_trees = false; } if (rc.getRoundNum() < EARLY_GAME && SURROUNDING_TREES_OWN.length >= MAX_INIT_TREES && enemyEngaged){ //Stop planting too many trees early game should_plant_trees = false; } //I have tons of bullets, might as well... if (rc.getTeamBullets() > BULLET_OVERFLOW) should_plant_trees = true; //Plant trees if (should_plant_trees) { //Plant trees for (Direction dir : current_available_slots) { System.out.println("Build slot at: "+dir); if (rc.canPlantTree(dir) && rand.nextFloat() < PLANT_TREE_CHANCE) { rc.plantTree(dir); treeTimer = rc.getRoundNum() + TREE_DELAY; } } //Try forcing 5 trees around gardener if no explicit build slots are found if (rc.getRoundNum() > treeTimer && SURROUNDING_TREES_OWN.length < 5 && SURROUNDING_TREES_OWN.length > 0) { if (debug && SPAWN_DIR != null) rc.setIndicatorLine(MY_LOCATION, MY_LOCATION.add(SPAWN_DIR, 3f), 0, 255, 255); Direction cDir = MY_LOCATION.directionTo(SURROUNDING_TREES_OWN[0].location); for (int i = 0; i < 5; i++) { cDir = cDir.rotateRightRads(DEG_60); if (rand.nextFloat() < PLANT_TREE_CHANCE) { System.out.println("Forcing planting of tree at: " + Float.toString(cDir.radians)); System.out.println("Radians between spawn_dir and cDir: "+Float.toString(SPAWN_DIR.radiansBetween(cDir))); if (absRad(SPAWN_DIR.radiansBetween(cDir)) >= DEG_60 && rc.canPlantTree(cDir)){ rc.plantTree(cDir); treeTimer = rc.getRoundNum() + TREE_DELAY; } } } } } //Water surrounding trees TreeInfo lowestHealthTree = null; float lowestHealth = 1000000f; for (TreeInfo tree : SURROUNDING_TREES_OWN) { if (tree.getHealth() < lowestHealth) { lowestHealth = tree.getHealth(); lowestHealthTree = tree; } } if (lowestHealthTree != null) { if (rc.canWater(lowestHealthTree.getID())) { try { rc.water(lowestHealthTree.getID()); } catch (GameActionException e) { e.printStackTrace(); } } } } //Build robots if (build_queue_empty){ if (SURROUNDING_ROBOTS_ENEMY.length > 0){ //Respond to counter enemy units if (!has_built_robot) { switch (SURROUNDING_ROBOTS_ENEMY[0].type) { case ARCHON: has_built_robot = forceBuilding(RobotType.SOLDIER); break; case LUMBERJACK: has_built_robot = forceBuilding(RobotType.SOLDIER); break; case SCOUT: has_built_robot = forceBuilding(RobotType.LUMBERJACK); break; case SOLDIER: has_built_robot = forceBuilding(RobotType.SOLDIER); break; case GARDENER: has_built_robot = forceBuilding(RobotType.LUMBERJACK); break; case TANK: has_built_robot = forceBuilding(RobotType.TANK); break; } } } //The ordering of spawning is important as those placed first has priority if (spawnLumberjackControl() && !has_built_robot) { has_built_robot = forceBuilding(RobotType.LUMBERJACK); if (has_built_robot) lumberjackTimer = rc.getRoundNum() + LUMBERJACK_DELAY; } if (spawnTankControl() && !has_built_robot) { has_built_robot = forceBuilding(RobotType.TANK); if (has_built_robot) tankTimer = rc.getRoundNum() + TANK_DELAY; } if (spawnSoldierControl() && !has_built_robot) { 
has_built_robot = forceBuilding(RobotType.SOLDIER); if (has_built_robot) soldierTimer = rc.getRoundNum() + SOLDIER_DELAY; } if (spawnScoutControl() && !has_built_robot) { //If you look closely, we pretty much never spawn scouts (but...just in case) has_built_robot = forceBuilding(RobotType.SCOUT); if (has_built_robot) scoutTimer = rc.getRoundNum() + SCOUT_DELAY; } } return; } public void endRound() throws GameActionException { super.endRound(); System.out.println("Before computing stuck: "+Integer.toString(Clock.getBytecodeNum())); if (prevLocation == null){ prevLocation = MY_LOCATION; } else{ if (rc.getRoundNum() % stuckRounds == 0){ //Check every stuckRounds rounds if (prevLocation.distanceTo(MY_LOCATION) < movement_stuck_radius){ //Oops, I'm stuck System.out.println("Oh oops, I'm STUCK!!!"); am_stuck = true; cooldown_active = true; cooldownTimer = rc.getRoundNum() + cooldownRounds; } else{ am_stuck = false; if (rc.getRoundNum() > cooldownTimer) cooldown_active = false; } prevLocation = MY_LOCATION; } } System.out.println("After computing stuck: "+Integer.toString(Clock.getBytecodeNum())); if (target != null){ if (debug) rc.setIndicatorDot(target, 255, 255, 255); System.out.println(broadcaster.location_to_channel(target)); } } //========================================================================= //<---------------------------- GARDENER MICRO ---------------------------> //========================================================================= //<--------------------------- SPAWNING METRICS --------------------------> private boolean shouldCheeseScout(){ float tot_bullets = 0; for (TreeInfo t:SURROUNDING_TREES_NEUTRAL){ tot_bullets += t.containedBullets; } if (tot_bullets > 40f) return true; return false; } private boolean spawnLumberjackControl(){ if (rc.getTeamBullets() < RobotType.LUMBERJACK.bulletCost) return false; if (SURROUNDING_TREES_NEUTRAL.length == 0) return false; else { float cur_ratio = (float) surrounding_lumberjacks / SURROUNDING_TREES_NEUTRAL.length; if (cur_ratio > LUMBERJACK_TO_NEUTRAL_RATIO){ float tot_health_trees = 0f; for (TreeInfo t:SURROUNDING_TREES_NEUTRAL){ tot_health_trees += t.getHealth(); } if (rc.getRoundNum() < MID_GAME){ if (number_lumberjacks > LUMBERJACK_TO_SOLDIER_RATIO*Math.min(1,number_soldiers)) return false; //Cap lumberjacks below 2*soldier count up till mid game } if (tot_health_trees > 2*NEUTRAL_TREE_HEALTH_THRESHOLD) return true; else if (tot_health_trees > NEUTRAL_TREE_HEALTH_THRESHOLD && rc.getRoundNum() > lumberjackTimer) return true; } else return true; } return false; } private boolean spawnSoldierControl(){ if (rc.getTeamBullets() < RobotType.SOLDIER.bulletCost) return false; if (rc.getRoundNum() < MID_GAME && !enemyEngaged) return false; if (rc.getRoundNum() < soldierTimer){ if (rc.getTeamBullets() > BULLET_OVERFLOW) return true; else return false; } else{ if (number_soldiers < MIN_SOLDIER) return true; float cur_ratio = (float)number_soldiers/Math.max(1,rc.getTreeCount()); if (cur_ratio > SOLDIER_TO_TREE_RATIO) return false; else{ if (rc.getRoundNum() < MID_GAME){ System.out.println("I'm in mid game with: "+Integer.toString(number_soldiers)+" soldiers"); //We spawn more soldiers if the economy holds up (even above the set maximum) if (rc.getTreeCount() > 13){ //todo magic numbers :O if (rand.nextFloat() < SPAWN_SOLDIER_CHANCE) return true; } if (number_soldiers >= MAX_SOLDIER) return false; } if (rand.nextFloat() > SPAWN_SOLDIER_CHANCE) return false; } } return true; } private boolean spawnTankControl(){ //We enforce a 
sustainable economy before spawning tanks (happens towards late-game) if (rc.getTeamBullets() < RobotType.TANK.bulletCost) return false; if (rc.getRoundNum() < MID_GAME*2 && !enemyEngaged) return false; if (rc.getTreeCount() < SUSTAINABLE_ECONOMY) return false; if (rc.getRoundNum() < tankTimer){ if (rc.getTeamBullets() > BULLET_OVERFLOW) return true; else return false; } else{ float cur_ratio = (float)number_tanks/Math.max(1,rc.getTreeCount()); if (cur_ratio > TANK_TO_TREE_RATIO){ if (rc.getTeamBullets() > 450) return true; else return false; } else{ if (rand.nextFloat() > SPAWN_TANK_CHANCE) return false; } } return true; } private boolean spawnScoutControl(){ //Pretty much never happens cuz SPAWN_SCOUT_CHANCE is 1f :P (as per time of final submission) if (rc.getTeamBullets() < RobotType.SCOUT.bulletCost) return false; if (rc.getRoundNum() < scoutTimer) return false; else{ float cur_ratio = (float)number_scouts/Math.max(1,rc.getTreeCount()); if (cur_ratio > SCOUT_TO_TREE_RATIO || rand.nextFloat() > SPAWN_SCOUT_CHANCE) return false; } return true; } //<-------------------------- BUILDING/PLANTING --------------------------> private RobotType[] getBuildOrder() throws GameActionException { boolean build_order_empty = true; RobotType[] build_order = new RobotType[MAX_BUILD_ORDER]; for (int i=0;i<MAX_BUILD_ORDER;i++){ build_order[i] = robotType(rc.readBroadcast(BUILD_ORDER_QUEUE+i)); if (build_order[i] != null) build_order_empty = false; } if (build_order_empty) return build_order; //todo Vets build order //As the build order was evaluated by archons in round 1, we wanted gardeners themselves to further vet the build order //based on ground conditions after they were spawned. float est_size = ENEMY_ARCHON_LOCATION.distanceTo(MY_LOCATION); if (getDensity(MY_LOCATION, 5f, TREES) > FOREST_DENSITY_THRESHOLD){ System.out.println("DENSE MAP DETECTED"); //todo stable: lum_sol_sol //Left space for fine-tuning the starting build orders but never really gotten around to doing it //Extremely difficult to detect the correct build order for maps (limited sense range) if (est_size < MAP_SMALL){ if (build_order[0] != null){ build_order[0] = RobotType.LUMBERJACK; } if (build_order[1] != null){ build_order[1] = RobotType.SOLDIER; } } else if (est_size < MAP_LARGE){ if (build_order[0] != null){ build_order[0] = RobotType.LUMBERJACK; } if (build_order[1] != null){ build_order[1] = RobotType.SOLDIER; } } else { if (build_order[0] != null){ build_order[0] = RobotType.LUMBERJACK; } if (build_order[1] != null){ build_order[1] = RobotType.SOLDIER; } } } else { System.out.println("Clear Map detected"); if (build_order[0] != null){ build_order[0] = RobotType.SOLDIER; } if (build_order[1] != null){ build_order[1] = RobotType.SOLDIER; } } return build_order; } public boolean forceBuilding(RobotType robot) throws GameActionException { //Customized function for spawning units as gardeners reserves a SPAWN_DIR (faster computation) if (SPAWN_DIR != null){ if (rc.canBuildRobot(robot, SPAWN_DIR)){ rc.buildRobot(robot, SPAWN_DIR); return true; } } return super.forceBuilding(robot); } //<------------------------- TARGET ACQUISITION --------------------------> private boolean isSettleLocationValid(MapLocation loc) throws GameActionException { if (loc == null) return false; if (debug) rc.setIndicatorDot(loc, 0, 0, 255); if (localBlacklist.contains(loc)) return false; if (Clock.getBytecodeNum() > BYTE_THRESHOLD) return true; //Relaxed conditions if bytecode exceeded, better to move than freeze if (globalBlacklist.contains(loc)) 
return false; //If unable to get to target in 20 turns => local blacklist then choose new location if (rc.getRoundNum() >= settleTimer) { settleTimer = 1000000; localBlacklist.add(loc); return false; } if (rc.canSenseAllOfCircle(loc, rc.getType().bodyRadius)) { //If target is not on the map if (!rc.onTheMap(loc, rc.getType().bodyRadius)) { //Add this location to blacklist int num_blacklisted = rc.readBroadcast(BLACKLISTED_FARMING_LOCATIONS); rc.broadcast(BLACKLISTED_LOCATIONS_X + num_blacklisted * 2, floatToIntPrecise(loc.x)); rc.broadcast(BLACKLISTED_LOCATIONS_Y + num_blacklisted * 2, floatToIntPrecise(loc.y)); num_blacklisted += 1; rc.broadcast(BLACKLISTED_FARMING_LOCATIONS, num_blacklisted); if (debug) rc.setIndicatorLine(MY_LOCATION, loc, 255, 0, 0); localBlacklist.add(loc); return false; } //Occupied by gardener if (rc.isLocationOccupiedByRobot(loc)) { RobotInfo robot = rc.senseRobotAtLocation(loc); if (robot.getType() == RobotType.GARDENER && robot.getID() != rc.getID()) { //Add this location to blacklist int num_blacklisted = rc.readBroadcast(BLACKLISTED_FARMING_LOCATIONS); rc.broadcast(BLACKLISTED_LOCATIONS_X + num_blacklisted * 2, floatToIntPrecise(loc.x)); rc.broadcast(BLACKLISTED_LOCATIONS_Y + num_blacklisted * 2, floatToIntPrecise(loc.y)); num_blacklisted += 1; rc.broadcast(BLACKLISTED_FARMING_LOCATIONS, num_blacklisted); localBlacklist.add(loc); return false; } } //Occupied by local tree if (rc.isLocationOccupiedByTree(loc)) { //Add this location to local blacklist localBlacklist.add(loc); return false; } //Otherwise occupied but not by robot if (rc.isCircleOccupiedExceptByThisRobot(loc, rc.getType().bodyRadius)) { boolean canSkip = false; if (rc.isLocationOccupiedByRobot(loc) && rc.senseRobotAtLocation(loc).getType() != RobotType.GARDENER && rc.senseRobotAtLocation(loc).getType() != RobotType.ARCHON) { canSkip = true; } if (!canSkip) { //Add this location to blacklist int num_blacklisted = rc.readBroadcast(BLACKLISTED_FARMING_LOCATIONS); rc.broadcast(BLACKLISTED_LOCATIONS_X + num_blacklisted * 2, floatToIntPrecise(loc.x)); rc.broadcast(BLACKLISTED_LOCATIONS_Y + num_blacklisted * 2, floatToIntPrecise(loc.y)); num_blacklisted += 1; rc.broadcast(BLACKLISTED_FARMING_LOCATIONS, num_blacklisted); localBlacklist.add(loc); return false; } } } return true; } private MapLocation acquireSettleLocation() throws GameActionException { MapLocation targetLoc = null; int num_settle_pts = Math.min(rc.readBroadcast(AVAILABLE_FARMING_LOCATIONS), MAX_READ_SETTLE_PTS); float mindist = 10000000f; for (int i = 0; i < num_settle_pts; i++){ //todo bytecode escape //if (Clock.getBytecodeNum() > BYTE_THRESHOLD) continue; float locX = intToFloatPrecise(rc.readBroadcast(FARMING_LOCATIONS_X + i * 2)); float locY = intToFloatPrecise(rc.readBroadcast(FARMING_LOCATIONS_Y + i * 2)); MapLocation newPt = new MapLocation(locX, locY); //ignore locations on blacklist or local blacklist //if (blacklist.contains(newPt) || localBlacklist.contains(newPt)){ if (localBlacklist.contains(newPt) || globalBlacklist.contains(newPt)) continue; float dist = newPt.distanceTo(MY_LOCATION); if (dist > MIN_SEPARATION_DIST) continue; if (debug) rc.setIndicatorDot(newPt, 0, 255, 255); if (dist < mindist){ mindist = dist; targetLoc = newPt; } } if (targetLoc != null) settleTimer = rc.getRoundNum() + Math.max(LOCATION_TIMEOUT, Math.round(MY_LOCATION.distanceTo(targetLoc)/(rc.getType().strideRadius)/4)); else settleTimer = rc.getRoundNum() + LOCATION_TIMEOUT; return targetLoc; } private void moveToSettleLocation() throws 
GameActionException { if (target == null) return; //Error, target not set beforehand if (rc.hasMoved()) return; //Error, robot has already moved if (MY_LOCATION.isWithinDistance(target, rc.getType().strideRadius)) { if (debug) rc.setIndicatorLine(MY_LOCATION, target, 0, 255, 0); rc.move(target); MY_LOCATION = rc.getLocation(); //Force settle System.out.println("Forced Settle"); int validCnt = 0; float density = 999999f; int spawn_direction = -1; for (int i = 0; i < 6; i++) { if (!rc.isCircleOccupied(MY_LOCATION.add(HEX_DIR[i], rc.getType().bodyRadius + TREE_RADIUS), TREE_RADIUS) && rc.onTheMap(MY_LOCATION.add(HEX_DIR[i], rc.getType().bodyRadius + TREE_RADIUS), TREE_RADIUS)) { validCnt += 1; System.out.println("Available build slot at: " + Float.toString(HEX_DIR[i].radians)); float eval = getDensity(MY_LOCATION.add(HEX_DIR[i], MOVEMENT_SCANNING_DISTANCE), MOVEMENT_SCANNING_RADIUS, TREES); if (eval < density) { density = eval; spawn_direction = i; } } } System.out.println("validCnt: " + validCnt); if (spawn_direction == -1) { spawn_direction = 0; } SPAWN_DIR = HEX_DIR[spawn_direction]; settledDown = true; if (debug) rc.setIndicatorLine(MY_LOCATION, target, 255, 0, 0); } else { if (debug) rc.setIndicatorLine(MY_LOCATION, target, 255, 255, 0); if (am_stuck) { search_dir = !search_dir; am_stuck = false; staying_conut = 0; } moveToTargetExperimental(MY_LOCATION, SENSOR_GRANULARITY, search_dir, cooldown_active, SLUG); } } private void broadcastAvailableSettleLocations() throws GameActionException { int num_settle_pts = rc.readBroadcast(AVAILABLE_FARMING_LOCATIONS); for (int i = 0; i < 6; i++){ MapLocation newGardenerLoc = rc.getLocation().add(HEX_DIR[i], FARMER_CLEARANCE_RADIUS + EPSILON); if (globalBlacklist.contains(newGardenerLoc)) continue; //Stop broadcasting if location has been blacklisted if (broadcaster.isValidGrid(newGardenerLoc)){ //Imprecise but shouldn't matter much if (debug) rc.setIndicatorDot(newGardenerLoc, 0, 255, 0); rc.broadcast(FARMING_LOCATIONS_X + 2 * num_settle_pts, floatToIntPrecise(newGardenerLoc.x)); rc.broadcast(FARMING_LOCATIONS_Y + 2 * num_settle_pts, floatToIntPrecise(newGardenerLoc.y)); num_settle_pts += 1; } } rc.broadcast(AVAILABLE_FARMING_LOCATIONS, num_settle_pts); } //<------------------------- MOVEMENT ALGORITHMS -------------------------> private float gardenerSurroundingDensity(MapLocation travelLoc) throws GameActionException { float curDensity = 0; for (int i=0;i<6;i++){ //Scan in a hexagonal pattern around the gardener MapLocation testLoc = travelLoc.add(new Direction(DEG_60*i), rc.getType().bodyRadius + 1f + EPSILON); if (!rc.onTheMap(testLoc.add(new Direction(DEG_60*i), 0.5f), 0.25f)){ curDensity += 1f; } else curDensity += getDensity(testLoc, 1f, OBJECTS); } return curDensity; } //todo bytecode saving metric private float gardenerDensity(MapLocation travelLoc) throws GameActionException { return getDensity(travelLoc, 3f, OBJECTS); } private boolean gardenerDensityMove(int GRANULARITY) throws GameActionException { if (cooldown_active) { if (cooldownDir == null) { //Look further away (long-range density) --> should escape local minimum Direction initDir = MY_LOCATION.directionTo(ENEMY_ARCHON_LOCATION).opposite(); float curScore = 0f; Direction tryDir = null; float bestScore = 99999f; Direction bestDir = null; MapLocation tryLoc = null; tryDir = initDir; tryLoc = MY_LOCATION.add(tryDir, rc.getType().sensorRadius-2f); curScore = getDensity(tryLoc, 2f, OBJECTS); if (curScore < bestScore) { bestScore = curScore; bestDir = tryDir; } for (int i = 1; i < 
GRANULARITY / 2; i++) { if (Clock.getBytecodeNum() > BYTE_THRESHOLD) continue; tryDir = initDir.rotateRightRads(i * DEG_360 / GRANULARITY); tryLoc = MY_LOCATION.add(tryDir, rc.getType().sensorRadius-2f); curScore = getDensity(tryLoc, 2f, OBJECTS); if (curScore < bestScore) { bestScore = curScore; bestDir = tryDir; } tryDir = initDir.rotateLeftRads(i * DEG_360 / GRANULARITY); tryLoc = MY_LOCATION.add(tryDir, rc.getType().sensorRadius-2f); curScore = getDensity(tryLoc, 2f, OBJECTS); if (curScore < bestScore) { bestScore = curScore; bestDir = tryDir; } } if (bestDir != null){ cooldownDir = bestDir; } else{ cooldownDir = initDir; } } target = MY_LOCATION.add(cooldownDir, 5f); if (am_stuck) { search_dir = !search_dir; am_stuck = false; staying_conut = 0; } moveToTargetExperimental(MY_LOCATION, SENSOR_GRANULARITY, search_dir, cooldown_active, SLUG); } else{ cooldownDir = null; //Look close to robot Direction initDir = MY_LOCATION.directionTo(ENEMY_ARCHON_LOCATION).opposite(); //if (SPAWNING_ARCHON_LOCATION != null) initDir = SPAWNING_ARCHON_LOCATION.directionTo(MY_LOCATION); Direction bestDir = null; float bestDist = 0f; for (int j = 0; j < 1; j++) { float curScore = 0f; Direction tryDir = null; float bestScore = 99999f; float tryDist = (rc.getType().strideRadius) / (float) Math.pow(2, j); tryDir = initDir; if (rc.canMove(tryDir, tryDist)) { curScore = gardenerSurroundingDensity(MY_LOCATION.add(tryDir, tryDist)); if (curScore < bestScore) { bestScore = curScore; bestDir = tryDir; bestDist = tryDist; if (curScore == 0) break; //Already minimum density direction :) } } for (int i = 1; i < GRANULARITY / 2; i++) { System.out.println("Each pass byte cost: "+Clock.getBytecodeNum()); //if (Clock.getBytecodeNum() > BYTE_THRESHOLD) continue; tryDir = initDir.rotateRightRads(i * DEG_360 / GRANULARITY); if (rc.canMove(tryDir, tryDist)) { curScore = gardenerSurroundingDensity(MY_LOCATION.add(tryDir, tryDist)); if (curScore < bestScore) { bestScore = curScore; bestDir = tryDir; bestDist = tryDist; if (curScore == 0) break; //Already minimum density direction :) } } tryDir = initDir.rotateLeftRads(i * DEG_360 / GRANULARITY); if (rc.canMove(tryDir, tryDist)) { curScore = gardenerSurroundingDensity(MY_LOCATION.add(tryDir, tryDist)); if (curScore < bestScore) { bestScore = curScore; bestDir = tryDir; bestDist = tryDist; if (curScore == 0) break; //Already minimum density direction :) } } } } if (bestDir != null && bestDist != 0f) { if (rc.canMove(bestDir, bestDist)) { rc.move(bestDir, bestDist); return true; } } } return false; } }
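The spawn gates earlier in this file (spawnSoldierControl, spawnTankControl, spawnScoutControl) all follow one shape: refuse while the unit's timer is still running unless bullets overflow, then compare the current unit-to-tree ratio against a cap. A minimal standalone sketch of that pattern using the tank case; the class name, parameter list, and the 300-bullet tank cost are illustrative assumptions, TANK_TO_TREE_RATIO (0.25f) and the overflow behaviour mirror the source, and the enemy-engagement, economy, randomness, and 450-bullet checks from the original are omitted for brevity:

// Sketch of the ratio-gated spawn check shared by the *Control() methods (tank case).
// SpawnGateSketch and its parameters are hypothetical; only the ratio cap and the
// overflow rule are taken from the file above.
final class SpawnGateSketch {
    static final float TANK_TO_TREE_RATIO = 0.25f;   // max tanks per friendly tree (from the source)
    static final float TANK_BULLET_COST = 300f;      // RobotType.TANK.bulletCost (assumed value)

    static boolean shouldSpawnTank(float teamBullets, float bulletOverflow,
                                   int tankCount, int treeCount,
                                   int roundNum, int tankTimer) {
        if (teamBullets < TANK_BULLET_COST) return false;              // cannot afford one anyway
        if (roundNum < tankTimer) return teamBullets > bulletOverflow; // timer still running: only build on overflow
        float ratio = (float) tankCount / Math.max(1, treeCount);      // current unit-to-tree ratio
        return ratio <= TANK_TO_TREE_RATIO;                            // otherwise respect the per-tree cap
    }
}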
src/Scarif_stable/GardenerPlayer.java
package Scarif_stable; import battlecode.common.*; import java.util.ArrayList; import java.util.HashSet; public class GardenerPlayer extends BasePlayer { //Gardener specific variables private int treeTimer = 0; private int lumberjackTimer = 0; private int soldierTimer = 0; private int scoutTimer = 0; private int tankTimer = 0; static float FARMER_CLEARANCE_RADIUS = 8f; //How far apart should farmers be static final float TREE_RADIUS = 1f; //Radius of player trees private RobotType[] build_order; private boolean build_queue_empty = true; private boolean has_built_robot = false; private boolean enemyEngaged = false; private boolean CHEESE_SCOUT = true; //Farming stuff private MapLocation SPAWNING_ARCHON_LOCATION = null; private MapLocation LAST_KNOWN_GARDENER_LOCATION = null; private boolean settledDown = false; static HashSet<MapLocation> localBlacklist = new HashSet<>(); static HashSet<MapLocation> globalBlacklist = new HashSet<>(); static int globalBlacklist_index = 0; static final int LOCATION_TIMEOUT = 50; //Timeout to get to a new location static int settleTimer; static final int MAX_READ_SETTLE_PTS = 50; //Maximum number of settle pts read static int MAX_INIT_TREES = 2; //Max number of early game trees private ArrayList<Direction> current_available_slots = new ArrayList<>(); //Reads current unit composition static int number_gardeners = 0; static int number_lumberjacks = 0; private int surrounding_lumberjacks = 0; static int number_scouts = 0; static int number_soldiers = 0; static int number_tanks = 0; //Spawning constants (Macro control) private Direction SPAWN_DIR = null; //Reserved spawning slot once in position private float BULLET_OVERFLOW = 347; //Overflowing bullets, build something! static final int SOLDIER_DELAY = 17; //Number of turns to spawn a new soldier private float SPAWN_SOLDIER_CHANCE = 0.75f; private float SOLDIER_TO_TREE_RATIO = 1.23f; //Max number of soldiers per tree static int MIN_SOLDIER = 5; //Minimum number of soldiers static int MAX_SOLDIER = 10; //Early game max soldiers static final int SCOUT_DELAY = 100; //Number of turns to spawn a new scout private float SPAWN_SCOUT_CHANCE = 0.0f; private float SCOUT_TO_TREE_RATIO = 0.25f; //Max number of scouts per tree static final int TANK_DELAY = 13; //Number of turns to spawn a new tank private float SPAWN_TANK_CHANCE = 1f; private float TANK_TO_TREE_RATIO = 0.25f; //Max number of tanks per tree private float TANK_DENSITY_THRESHOLD = 0.15f; //Surrounding density < this before tanks can be spawned private float LUMBERJACK_TO_NEUTRAL_RATIO = 0.43f; //Number of lumbers per tree surrounding gardener private float LUMBERJACK_TO_SOLDIER_RATIO = 2f; //Number of lumbers per soldier private final int LUMBERJACK_DELAY = 30; //Number of turns to spawn a new lumberjack private float NEUTRAL_TREE_HEALTH_THRESHOLD = 750f; //Total accumulative health of surrounding trees | above which will spawn lumberjacks //Movement stuff static MapLocation prevLocation = null; //Previous location static int staying_conut = 0; //How many turns spent around there static int stuckRounds = 11; //How many rounds before I'm stuck static int cooldownRounds = 17; static int cooldownTimer = 0; //Turns to run slug path before reverting to direct pathing static float movement_stuck_radius = RobotType.GARDENER.strideRadius/8*stuckRounds; //Stuck distance for 17 rounds static boolean am_stuck = false; //I am stuck where I am... 
static boolean cooldown_active = false; //Cooldown period where direct pathing is disabled static boolean search_dir = true; //Right/Left bot, true == Right bot private int timeToTarget = 0; private int targetTimer = THE_FINAL_PROBLEM; private int SLUG = 3; //How many levels of slugging private Direction cooldownDir = null; //Try to escape local minimum /* GardenerPlayer is one of our most extensive pieces of code (2nd longest after BasePlayer) as it controls the critical macro strategy Basic logic structure of our gardeners: settleDown = can_settle_down(); if (!settleDown){ if (!is_target_valid(target)){ acquire_target(); } else{ move_to_target(); } } else{ broadcast_locations_for_other_gardeners(); reserve_slot_for_spawning(); spam_trees(); //Max 5 } spawnUnits(); Ofc there's a bunch of additional checks and ad-hoc code in there, but that's about it for our gardeners. We experimented with a spade of different farming techniques for gardeners and in the end, settled on individual hexagonal flower design in a bigger hexagonal lattice. It provided the best balance between settling speed, space efficiency as well as gardener-to-tree ratio. Some points of interest in our gardener code: --> Short-range/Long-range min density-sensing movement --> Scans for local minimum density and moves towards a position that evaluates as such --> If stuck, try scanning further to escape local minima --> Build-order queue --> At high-level plays, mastering the initial build order is critical --> Games are frequently decided in the first 100 rounds --> Sensing valid building locations --> Finer sensing than simply a free circle of radius 3 around gardener If you're a participant, probably such considerations have already crossed your mind at some point during the competition. A stable gardener code is the single most critical piece of code in this year's tournament (well, aside from getting that gardener out from the archons). Personally, our team floundered at the Qualifying tournament due to having unstable gardener code >.< they failed to settle down fast enough and thus resulted in a delayed economic buildup. So to those teams out there (and future participants reading this), identify critical pieces of code early and stabilize them before moving on... */ public GardenerPlayer(RobotController rc) throws GameActionException { super(rc); try { initPlayer(); } catch(Exception e){ e.printStackTrace(); } for (; ; Clock.yield()) { try { startRound(); run(); endRound(); } catch(Exception e){ e.printStackTrace(); } } } private void initPlayer() throws GameActionException { MOVEMENT_SCANNING_DISTANCE = 3.1f; MOVEMENT_SCANNING_RADIUS = 2.1f; SURROUNDING_ROBOTS_OWN = rc.senseNearbyRobots(3f, rc.getTeam()); for (RobotInfo r:SURROUNDING_ROBOTS_OWN){ if (r.type == RobotType.ARCHON){ SPAWNING_ARCHON_LOCATION = r.location; break; } } //todo I'm debugging this... debug = false; //Turn this on for visualizations as to how our gardeners work } public void startRound() throws GameActionException { super.startRound(); SURROUNDING_TREES_OWN = rc.senseNearbyTrees(3f, rc.getTeam()); if (settledDown){ rc.broadcast(SUM_SETTLED,rc.readBroadcast(SUM_SETTLED)+1); } else{ rc.broadcast(SUM_GARDENERS,rc.readBroadcast(SUM_GARDENERS)+1); } build_order = getBuildOrder(); build_queue_empty = true; has_built_robot = false; if (SURROUNDING_ROBOTS_ENEMY.length > 0){ contactRep(); } //Reset farmer settling counter - distributing this piece of control code provides redundancy (what if our archon was destroyed?) 
if (rc.readBroadcast(CONTROL_FARMER_HEARTBEAT) <= Math.max(0, rc.getRoundNum() - 3)){ rc.broadcast(CONTROL_FARMER, rc.getID()); rc.broadcast(AVAILABLE_FARMING_LOCATIONS, 0); } if (rc.readBroadcast(CONTROL_FARMER) == rc.getID()) { rc.setIndicatorLine(MY_LOCATION, MY_LOCATION.add(new Direction(0f),2f), 0, 0, 0); rc.broadcast(AVAILABLE_FARMING_LOCATIONS, 0); rc.broadcast(CONTROL_FARMER_HEARTBEAT, rc.getRoundNum()); if (rc.readBroadcast(BLACKLISTED_FARMING_LOCATIONS) > MAX_BLACKLISTED_LOCATIONS){ rc.broadcast(BLACKLISTED_FARMING_LOCATIONS, 0); } } number_gardeners = rc.readBroadcast(COUNT_GARDENERS); number_lumberjacks = rc.readBroadcast(COUNT_LUMBERJACKS); number_scouts = rc.readBroadcast(COUNT_SCOUTS); number_soldiers = rc.readBroadcast(COUNT_SOLDIERS); number_tanks = rc.readBroadcast(COUNT_TANKS); surrounding_lumberjacks = 0; for (RobotInfo r:SURROUNDING_ROBOTS_OWN){ if (r.type == RobotType.LUMBERJACK) surrounding_lumberjacks++; } //todo check for last known gardener position (sticks close together :D) //We were thinking of some sort of flocking behavior for gardeners (not to stray too far away from others) for (RobotInfo r:SURROUNDING_ROBOTS_OWN){ if (r.type == RobotType.GARDENER){ LAST_KNOWN_GARDENER_LOCATION = r.location; break; } } if (LAST_KNOWN_GARDENER_LOCATION != null) if (debug) rc.setIndicatorLine(MY_LOCATION, LAST_KNOWN_GARDENER_LOCATION, 0, 255, 0); //Check whether enemy has been engaged! if (!enemyEngaged) enemyEngaged = (rc.readBroadcast(ENEMY_ENGAGED) == 1); //Read and store global blacklisted locations //Fetch global blacklist only once (and as blacklisted positions are only appended - previous entries immutable, we simply get updates) int blacklist_size = rc.readBroadcast(BLACKLISTED_FARMING_LOCATIONS); System.out.println("Total Blacklisted locations: " + Integer.toString(blacklist_size)); for (int i = globalBlacklist_index; i < blacklist_size; i++){ float locX = intToFloatPrecise(rc.readBroadcast(BLACKLISTED_LOCATIONS_X + i * 2)); float locY = intToFloatPrecise(rc.readBroadcast(BLACKLISTED_LOCATIONS_Y + i * 2)); globalBlacklist.add(new MapLocation(locX, locY)); if (debug) rc.setIndicatorDot(new MapLocation(locX, locY), 255, 255, 0); } globalBlacklist_index = blacklist_size; } private void run() throws GameActionException { //Follow build order for (int i=0;i<MAX_BUILD_ORDER;i++){ if (build_order[i]!=null){ System.out.println("Building: "+build_order[i]); build_queue_empty = false; if (forceBuilding(build_order[i])){ rc.broadcast(BUILD_ORDER_QUEUE+i, 0); } break; } } //Cheese dat scout out if (CHEESE_SCOUT && build_order[0] == null && build_order[1] == null){ if (shouldCheeseScout() || rc.readBroadcast(FORCE_SCOUT_CHEESE) > 80) { //Reads whether that single lone scout has been spawned to collect bullets from trees (thus we cap at 1 scout per game) CHEESE_SCOUT = (rc.readBroadcast(SCOUT_CHEESE_SIGNAL) == 0); if (CHEESE_SCOUT){ if(forceBuilding(RobotType.SCOUT)){ System.out.println("Scout cheeser sent!"); rc.broadcast(SCOUT_CHEESE_SIGNAL, 1); } } } } //Plant trees if (!settledDown){ if (!rc.isCircleOccupiedExceptByThisRobot(MY_LOCATION, 3.01f) && rc.onTheMap(MY_LOCATION, 3f)) current_available_slots = HEXAGONAL_PATTERN; //Gardener starts to farm early //We relax conditions for first farm to start (expedites economic buildup) else if (rc.getTreeCount() == 0) current_available_slots = getBuildSlots(MY_LOCATION, 2); else current_available_slots = getBuildSlots(MY_LOCATION, FARMER_MIN_SLOTS); if (current_available_slots.size() > 0) { //Get most spacious direction - hopefully 
spawning in this direction will not get units stuck float density = 999999f; int spawn_direction = -1; System.out.println("Total build slots: " + Integer.toString(current_available_slots.size())); for (int i = 0; i < current_available_slots.size(); i++) { System.out.println("Available build slot at: " + Float.toString(current_available_slots.get(i).radians)); float eval = getDensity(MY_LOCATION.add(current_available_slots.get(i), MOVEMENT_SCANNING_DISTANCE), MOVEMENT_SCANNING_RADIUS, OBJECTS); if (eval < density) { density = eval; spawn_direction = i; } } if (spawn_direction != -1) { SPAWN_DIR = current_available_slots.get(spawn_direction); //We reserve a direction for spawning units current_available_slots.remove(spawn_direction); } } } else if (current_available_slots.size() < 1){ //Once settled down, hinge build_slots around spawn_direction (assume hextet of slots are free) //Using previously computed SPAWN_DIR to hinge the other 5 slots around reduces bytecode usage as we won't need to //test surroundings for build slots again. if (SPAWN_DIR != null){ current_available_slots.clear(); for (int i=1;i<6;i++){ current_available_slots.add(SPAWN_DIR.rotateRightRads(DEG_60*i)); } } else { //If SPAWN_DIR is not set, search around for free slots assign SPAWN_DIR current_available_slots = getBuildSlots(MY_LOCATION, 2); if (current_available_slots.size() > 0){ SPAWN_DIR = current_available_slots.get(0); current_available_slots.remove(0); } } } //Debug bytecode costs System.out.println("Bytes used to compute build locations: "+Integer.toString(Clock.getBytecodeNum())); //Debug for spawn direction if (SPAWN_DIR != null) System.out.println("SPAWN_DIR: "+Float.toString(SPAWN_DIR.radians)); //Debug settling down System.out.println("Have I settled? "+Boolean.toString(settledDown)); System.out.println("Max slots around me: "+Integer.toString(Math.max(FARMER_MIN_SLOTS-SURROUNDING_TREES_OWN.length,1))); if (!settledDown){ //Try to spawn a lumberjack //Check if there are trees around if (rc.getRoundNum() > lumberjackTimer) { forceBuilding(RobotType.LUMBERJACK); lumberjackTimer = rc.getRoundNum() + LUMBERJACK_DELAY; } //Acquire new target if none is currently set if (target == null || !isSettleLocationValid(target)){ System.out.println("Computed target validity: "+Clock.getBytecodeNum()); target = acquireSettleLocation(); System.out.println("Acquired target: "+Clock.getBytecodeNum()); } //Move to target location if (target == null){ if (debug) rc.setIndicatorDot(MY_LOCATION, 255, 0, 0); gardenerDensityMove(GARDENER_MOVE_GRANULARITY); //Initial movement algo for the first gardener (or if no nearby locations are available) System.out.println("Computed density move direction: "+Clock.getBytecodeNum()); } else{ if (debug) rc.setIndicatorDot(MY_LOCATION, 0, 255, 0); moveToSettleLocation(); //Navigate to nearest available location (on that global hexagonal lattice for tree clusters) System.out.println("Computed move to target: "+Clock.getBytecodeNum()); } //todo don't settledown if entire grove of trees won't be on the map // && rc.onTheMap(MY_LOCATION, 3f) //We decided to relax the conditions as planting some trees is better off than delaying planting trees (huge economic falloff) if (target == null) { if (rc.getTreeCount() == 0) { //If you're the first gardener to start a cluster of trees, ignore spreading out from surrounding gardeners if (current_available_slots.size() > 0 && isSpreadOut(MY_LOCATION, FARMER_CLEARANCE_RADIUS, false)) settledDown = true; } else if (current_available_slots.size() > 0 && 
isSpreadOut(MY_LOCATION, FARMER_CLEARANCE_RADIUS, true)) settledDown = true; } } else{ //Announce locations around itself broadcastAvailableSettleLocations(); //Should I plant trees? boolean should_plant_trees = (rc.getRoundNum() > treeTimer); //Maintain a basic army before planting too many trees if (build_order[0] != null && build_order[1] != null && rc.getRoundNum() < EARLY_GAME){ //Skip building trees till first 2 soldiers are out :) should_plant_trees = false; } if (spawnSoldierControl()){ //If I should spawn a lumberjack or soldier or if I've tons of bullets, go build these units first :) should_plant_trees = false; } if (rc.getRoundNum() < EARLY_GAME && SURROUNDING_TREES_OWN.length >= MAX_INIT_TREES && enemyEngaged){ //Stop planting too many trees early game should_plant_trees = false; } //I have tons of bullets, might as well... if (rc.getTeamBullets() > BULLET_OVERFLOW) should_plant_trees = true; //Plant trees if (should_plant_trees) { //Plant trees for (Direction dir : current_available_slots) { System.out.println("Build slot at: "+dir); if (rc.canPlantTree(dir) && rand.nextFloat() < PLANT_TREE_CHANCE) { rc.plantTree(dir); treeTimer = rc.getRoundNum() + TREE_DELAY; } } //Try forcing 5 trees around gardener if no explicit build slots are found if (rc.getRoundNum() > treeTimer && SURROUNDING_TREES_OWN.length < 5 && SURROUNDING_TREES_OWN.length > 0) { if (debug && SPAWN_DIR != null) rc.setIndicatorLine(MY_LOCATION, MY_LOCATION.add(SPAWN_DIR, 3f), 0, 255, 255); Direction cDir = MY_LOCATION.directionTo(SURROUNDING_TREES_OWN[0].location); for (int i = 0; i < 5; i++) { cDir = cDir.rotateRightRads(DEG_60); if (rand.nextFloat() < PLANT_TREE_CHANCE) { System.out.println("Forcing planting of tree at: " + Float.toString(cDir.radians)); System.out.println("Radians between spawn_dir and cDir: "+Float.toString(SPAWN_DIR.radiansBetween(cDir))); if (absRad(SPAWN_DIR.radiansBetween(cDir)) >= DEG_60 && rc.canPlantTree(cDir)){ rc.plantTree(cDir); treeTimer = rc.getRoundNum() + TREE_DELAY; } } } } } //Water surrounding trees TreeInfo lowestHealthTree = null; float lowestHealth = 1000000f; for (TreeInfo tree : SURROUNDING_TREES_OWN) { if (tree.getHealth() < lowestHealth) { lowestHealth = tree.getHealth(); lowestHealthTree = tree; } } if (lowestHealthTree != null) { if (rc.canWater(lowestHealthTree.getID())) { try { rc.water(lowestHealthTree.getID()); } catch (GameActionException e) { e.printStackTrace(); } } } } //Build robots if (build_queue_empty){ if (SURROUNDING_ROBOTS_ENEMY.length > 0){ //Respond to counter enemy units if (!has_built_robot) { switch (SURROUNDING_ROBOTS_ENEMY[0].type) { case ARCHON: has_built_robot = forceBuilding(RobotType.SOLDIER); break; case LUMBERJACK: has_built_robot = forceBuilding(RobotType.SOLDIER); break; case SCOUT: has_built_robot = forceBuilding(RobotType.LUMBERJACK); break; case SOLDIER: has_built_robot = forceBuilding(RobotType.SOLDIER); break; case GARDENER: has_built_robot = forceBuilding(RobotType.LUMBERJACK); break; case TANK: has_built_robot = forceBuilding(RobotType.TANK); break; } } } //The ordering of spawning is important as those placed first has priority if (spawnLumberjackControl() && !has_built_robot) { has_built_robot = forceBuilding(RobotType.LUMBERJACK); if (has_built_robot) lumberjackTimer = rc.getRoundNum() + LUMBERJACK_DELAY; } if (spawnTankControl() && !has_built_robot) { has_built_robot = forceBuilding(RobotType.TANK); if (has_built_robot) tankTimer = rc.getRoundNum() + TANK_DELAY; } if (spawnSoldierControl() && !has_built_robot) { 
has_built_robot = forceBuilding(RobotType.SOLDIER); if (has_built_robot) soldierTimer = rc.getRoundNum() + SOLDIER_DELAY; } if (spawnScoutControl() && !has_built_robot) { //If you look closely, we pretty much never spawn scouts (but...just in case) has_built_robot = forceBuilding(RobotType.SCOUT); if (has_built_robot) scoutTimer = rc.getRoundNum() + SCOUT_DELAY; } } return; } public void endRound() throws GameActionException { super.endRound(); System.out.println("Before computing stuck: "+Integer.toString(Clock.getBytecodeNum())); if (prevLocation == null){ prevLocation = MY_LOCATION; } else{ if (rc.getRoundNum() % stuckRounds == 0){ //Check every stuckRounds rounds if (prevLocation.distanceTo(MY_LOCATION) < movement_stuck_radius){ //Oops, I'm stuck System.out.println("Oh oops, I'm STUCK!!!"); am_stuck = true; cooldown_active = true; cooldownTimer = rc.getRoundNum() + cooldownRounds; } else{ am_stuck = false; if (rc.getRoundNum() > cooldownTimer) cooldown_active = false; } prevLocation = MY_LOCATION; } } System.out.println("After computing stuck: "+Integer.toString(Clock.getBytecodeNum())); if (target != null){ if (debug) rc.setIndicatorDot(target, 255, 255, 255); System.out.println(broadcaster.location_to_channel(target)); } } //========================================================================= //<---------------------------- GARDENER MICRO ---------------------------> //========================================================================= //<--------------------------- SPAWNING METRICS --------------------------> private boolean shouldCheeseScout(){ float tot_bullets = 0; for (TreeInfo t:SURROUNDING_TREES_NEUTRAL){ tot_bullets += t.containedBullets; } if (tot_bullets > 40f) return true; return false; } private boolean spawnLumberjackControl(){ if (rc.getTeamBullets() < RobotType.LUMBERJACK.bulletCost) return false; if (SURROUNDING_TREES_NEUTRAL.length == 0) return false; else { float cur_ratio = (float) surrounding_lumberjacks / SURROUNDING_TREES_NEUTRAL.length; if (cur_ratio > LUMBERJACK_TO_NEUTRAL_RATIO){ float tot_health_trees = 0f; for (TreeInfo t:SURROUNDING_TREES_NEUTRAL){ tot_health_trees += t.getHealth(); } if (rc.getRoundNum() < MID_GAME){ if (number_lumberjacks > LUMBERJACK_TO_SOLDIER_RATIO*Math.min(1,number_soldiers)) return false; //Cap lumberjacks below 2*soldier count up till mid game } if (tot_health_trees > 2*NEUTRAL_TREE_HEALTH_THRESHOLD) return true; else if (tot_health_trees > NEUTRAL_TREE_HEALTH_THRESHOLD && rc.getRoundNum() > lumberjackTimer) return true; } else return true; } return false; } private boolean spawnSoldierControl(){ if (rc.getTeamBullets() < RobotType.SOLDIER.bulletCost) return false; if (rc.getRoundNum() < MID_GAME && !enemyEngaged) return false; if (rc.getRoundNum() < soldierTimer){ if (rc.getTeamBullets() > BULLET_OVERFLOW) return true; else return false; } else{ if (number_soldiers < MIN_SOLDIER) return true; float cur_ratio = (float)number_soldiers/Math.max(1,rc.getTreeCount()); if (cur_ratio > SOLDIER_TO_TREE_RATIO) return false; else{ if (rc.getRoundNum() < MID_GAME){ System.out.println("I'm in mid game with: "+Integer.toString(number_soldiers)+" soldiers"); //We spawn more soldiers if the economy holds up (even above the set maximum) if (rc.getTreeCount() > 13){ //todo magic numbers :O if (rand.nextFloat() < SPAWN_SOLDIER_CHANCE) return true; } if (number_soldiers >= MAX_SOLDIER) return false; } if (rand.nextFloat() > SPAWN_SOLDIER_CHANCE) return false; } } return true; } private boolean spawnTankControl(){ //We enforce a 
sustainable economy before spawning tanks (happens towards late-game) if (rc.getTeamBullets() < RobotType.TANK.bulletCost) return false; if (rc.getRoundNum() < MID_GAME*2 && !enemyEngaged) return false; if (rc.getTreeCount() < SUSTAINABLE_ECONOMY) return false; if (rc.getRoundNum() < tankTimer){ if (rc.getTeamBullets() > BULLET_OVERFLOW) return true; else return false; } else{ float cur_ratio = (float)number_tanks/Math.max(1,rc.getTreeCount()); if (cur_ratio > TANK_TO_TREE_RATIO){ if (rc.getTeamBullets() > 450) return true; else return false; } else{ if (rand.nextFloat() > SPAWN_TANK_CHANCE) return false; } } return true; } private boolean spawnScoutControl(){ //Pretty much never happens cuz SPAWN_SCOUT_CHANCE is 1f :P (as per time of final submission) if (rc.getTeamBullets() < RobotType.SCOUT.bulletCost) return false; if (rc.getRoundNum() < scoutTimer) return false; else{ float cur_ratio = (float)number_scouts/Math.max(1,rc.getTreeCount()); if (cur_ratio > SCOUT_TO_TREE_RATIO || rand.nextFloat() > SPAWN_SCOUT_CHANCE) return false; } return true; } //<-------------------------- BUILDING/PLANTING --------------------------> private RobotType[] getBuildOrder() throws GameActionException { boolean build_order_empty = true; RobotType[] build_order = new RobotType[MAX_BUILD_ORDER]; for (int i=0;i<MAX_BUILD_ORDER;i++){ build_order[i] = robotType(rc.readBroadcast(BUILD_ORDER_QUEUE+i)); if (build_order[i] != null) build_order_empty = false; } if (build_order_empty) return build_order; //todo Vets build order //As the build order was evaluated by archons in round 1, we wanted gardeners themselves to further vet the build order //based on ground conditions after they were spawned. float est_size = ENEMY_ARCHON_LOCATION.distanceTo(MY_LOCATION); if (getDensity(MY_LOCATION, 5f, TREES) > FOREST_DENSITY_THRESHOLD){ System.out.println("DENSE MAP DETECTED"); //todo stable: lum_sol_sol //Left space for fine-tuning the starting build orders but never really gotten around to doing it //Extremely difficult to detect the correct build order for maps (limited sense range) if (est_size < MAP_SMALL){ if (build_order[0] != null){ build_order[0] = RobotType.LUMBERJACK; } if (build_order[1] != null){ build_order[1] = RobotType.SOLDIER; } } else if (est_size < MAP_LARGE){ if (build_order[0] != null){ build_order[0] = RobotType.LUMBERJACK; } if (build_order[1] != null){ build_order[1] = RobotType.SOLDIER; } } else { if (build_order[0] != null){ build_order[0] = RobotType.LUMBERJACK; } if (build_order[1] != null){ build_order[1] = RobotType.SOLDIER; } } } else { System.out.println("Clear Map detected"); if (build_order[0] != null){ build_order[0] = RobotType.SOLDIER; } if (build_order[1] != null){ build_order[1] = RobotType.SOLDIER; } } return build_order; } public boolean forceBuilding(RobotType robot) throws GameActionException { //Customized function for spawning units as gardeners reserves a SPAWN_DIR (faster computation) if (SPAWN_DIR != null){ if (rc.canBuildRobot(robot, SPAWN_DIR)){ rc.buildRobot(robot, SPAWN_DIR); return true; } } return super.forceBuilding(robot); } //<------------------------- TARGET ACQUISITION --------------------------> private boolean isSettleLocationValid(MapLocation loc) throws GameActionException { if (loc == null) return false; if (debug) rc.setIndicatorDot(loc, 0, 0, 255); if (localBlacklist.contains(loc)) return false; if (Clock.getBytecodeNum() > BYTE_THRESHOLD) return true; //Relaxed conditions if bytecode exceeded, better to move than freeze if (globalBlacklist.contains(loc)) 
return false; //If unable to get to target in 20 turns => local blacklist then choose new location if (rc.getRoundNum() >= settleTimer) { settleTimer = 1000000; localBlacklist.add(loc); return false; } if (rc.canSenseAllOfCircle(loc, rc.getType().bodyRadius)) { //If target is not on the map if (!rc.onTheMap(loc, rc.getType().bodyRadius)) { //Add this location to blacklist int num_blacklisted = rc.readBroadcast(BLACKLISTED_FARMING_LOCATIONS); rc.broadcast(BLACKLISTED_LOCATIONS_X + num_blacklisted * 2, floatToIntPrecise(loc.x)); rc.broadcast(BLACKLISTED_LOCATIONS_Y + num_blacklisted * 2, floatToIntPrecise(loc.y)); num_blacklisted += 1; rc.broadcast(BLACKLISTED_FARMING_LOCATIONS, num_blacklisted); if (debug) rc.setIndicatorLine(MY_LOCATION, loc, 255, 0, 0); localBlacklist.add(loc); return false; } //Occupied by gardener if (rc.isLocationOccupiedByRobot(loc)) { RobotInfo robot = rc.senseRobotAtLocation(loc); if (robot.getType() == RobotType.GARDENER && robot.getID() != rc.getID()) { //Add this location to blacklist int num_blacklisted = rc.readBroadcast(BLACKLISTED_FARMING_LOCATIONS); rc.broadcast(BLACKLISTED_LOCATIONS_X + num_blacklisted * 2, floatToIntPrecise(loc.x)); rc.broadcast(BLACKLISTED_LOCATIONS_Y + num_blacklisted * 2, floatToIntPrecise(loc.y)); num_blacklisted += 1; rc.broadcast(BLACKLISTED_FARMING_LOCATIONS, num_blacklisted); localBlacklist.add(loc); return false; } } //Occupied by local tree if (rc.isLocationOccupiedByTree(loc)) { //Add this location to local blacklist localBlacklist.add(loc); return false; } //Otherwise occupied but not by robot if (rc.isCircleOccupiedExceptByThisRobot(loc, rc.getType().bodyRadius)) { boolean canSkip = false; if (rc.isLocationOccupiedByRobot(loc) && rc.senseRobotAtLocation(loc).getType() != RobotType.GARDENER && rc.senseRobotAtLocation(loc).getType() != RobotType.ARCHON) { canSkip = true; } if (!canSkip) { //Add this location to blacklist int num_blacklisted = rc.readBroadcast(BLACKLISTED_FARMING_LOCATIONS); rc.broadcast(BLACKLISTED_LOCATIONS_X + num_blacklisted * 2, floatToIntPrecise(loc.x)); rc.broadcast(BLACKLISTED_LOCATIONS_Y + num_blacklisted * 2, floatToIntPrecise(loc.y)); num_blacklisted += 1; rc.broadcast(BLACKLISTED_FARMING_LOCATIONS, num_blacklisted); localBlacklist.add(loc); return false; } } } return true; } private MapLocation acquireSettleLocation() throws GameActionException { MapLocation targetLoc = null; int num_settle_pts = Math.min(rc.readBroadcast(AVAILABLE_FARMING_LOCATIONS), MAX_READ_SETTLE_PTS); float mindist = 10000000f; for (int i = 0; i < num_settle_pts; i++){ //todo bytecode escape //if (Clock.getBytecodeNum() > BYTE_THRESHOLD) continue; float locX = intToFloatPrecise(rc.readBroadcast(FARMING_LOCATIONS_X + i * 2)); float locY = intToFloatPrecise(rc.readBroadcast(FARMING_LOCATIONS_Y + i * 2)); MapLocation newPt = new MapLocation(locX, locY); //ignore locations on blacklist or local blacklist //if (blacklist.contains(newPt) || localBlacklist.contains(newPt)){ if (localBlacklist.contains(newPt) || globalBlacklist.contains(newPt)) continue; float dist = newPt.distanceTo(MY_LOCATION); if (debug) rc.setIndicatorDot(newPt, 0, 255, 255); if (dist < mindist){ mindist = dist; targetLoc = newPt; } } if (targetLoc != null) settleTimer = rc.getRoundNum() + Math.max(LOCATION_TIMEOUT, Math.round(MY_LOCATION.distanceTo(targetLoc)/(rc.getType().strideRadius)/4)); else settleTimer = rc.getRoundNum() + LOCATION_TIMEOUT; return targetLoc; } private void moveToSettleLocation() throws GameActionException { if (target == null) return; 
//Error, target not set beforehand if (rc.hasMoved()) return; //Error, robot has already moved if (MY_LOCATION.isWithinDistance(target, rc.getType().strideRadius)) { if (debug) rc.setIndicatorLine(MY_LOCATION, target, 0, 255, 0); rc.move(target); MY_LOCATION = rc.getLocation(); //Force settle System.out.println("Forced Settle"); int validCnt = 0; float density = 999999f; int spawn_direction = -1; for (int i = 0; i < 6; i++) { if (!rc.isCircleOccupied(MY_LOCATION.add(HEX_DIR[i], rc.getType().bodyRadius + TREE_RADIUS), TREE_RADIUS) && rc.onTheMap(MY_LOCATION.add(HEX_DIR[i], rc.getType().bodyRadius + TREE_RADIUS), TREE_RADIUS)) { validCnt += 1; System.out.println("Available build slot at: " + Float.toString(HEX_DIR[i].radians)); float eval = getDensity(MY_LOCATION.add(HEX_DIR[i], MOVEMENT_SCANNING_DISTANCE), MOVEMENT_SCANNING_RADIUS, TREES); if (eval < density) { density = eval; spawn_direction = i; } } } System.out.println("validCnt: " + validCnt); if (spawn_direction == -1) { spawn_direction = 0; } SPAWN_DIR = HEX_DIR[spawn_direction]; settledDown = true; if (debug) rc.setIndicatorLine(MY_LOCATION, target, 255, 0, 0); } else { if (debug) rc.setIndicatorLine(MY_LOCATION, target, 255, 255, 0); if (am_stuck) { search_dir = !search_dir; am_stuck = false; staying_conut = 0; } moveToTargetExperimental(MY_LOCATION, SENSOR_GRANULARITY, search_dir, cooldown_active, SLUG); } } private void broadcastAvailableSettleLocations() throws GameActionException { int num_settle_pts = rc.readBroadcast(AVAILABLE_FARMING_LOCATIONS); for (int i = 0; i < 6; i++){ MapLocation newGardenerLoc = rc.getLocation().add(HEX_DIR[i], FARMER_CLEARANCE_RADIUS + EPSILON); if (globalBlacklist.contains(newGardenerLoc)) continue; //Stop broadcasting if location has been blacklisted if (broadcaster.isValidGrid(newGardenerLoc)){ //Imprecise but shouldn't matter much if (debug) rc.setIndicatorDot(newGardenerLoc, 0, 255, 0); rc.broadcast(FARMING_LOCATIONS_X + 2 * num_settle_pts, floatToIntPrecise(newGardenerLoc.x)); rc.broadcast(FARMING_LOCATIONS_Y + 2 * num_settle_pts, floatToIntPrecise(newGardenerLoc.y)); num_settle_pts += 1; } } rc.broadcast(AVAILABLE_FARMING_LOCATIONS, num_settle_pts); } //<------------------------- MOVEMENT ALGORITHMS -------------------------> private float gardenerSurroundingDensity(MapLocation travelLoc) throws GameActionException { float curDensity = 0; for (int i=0;i<6;i++){ //Scan in a hexagonal pattern around the gardener MapLocation testLoc = travelLoc.add(new Direction(DEG_60*i), rc.getType().bodyRadius + 1f + EPSILON); if (!rc.onTheMap(testLoc.add(new Direction(DEG_60*i), 0.5f), 0.25f)){ curDensity += 1f; } else curDensity += getDensity(testLoc, 1f, OBJECTS); } return curDensity; } //todo bytecode saving metric private float gardenerDensity(MapLocation travelLoc) throws GameActionException { return getDensity(travelLoc, 3f, OBJECTS); } private boolean gardenerDensityMove(int GRANULARITY) throws GameActionException { if (cooldown_active) { if (cooldownDir == null) { //Look further away (long-range density) --> should escape local minimum Direction initDir = MY_LOCATION.directionTo(ENEMY_ARCHON_LOCATION).opposite(); float curScore = 0f; Direction tryDir = null; float bestScore = 99999f; Direction bestDir = null; MapLocation tryLoc = null; tryDir = initDir; tryLoc = MY_LOCATION.add(tryDir, rc.getType().sensorRadius-2f); curScore = getDensity(tryLoc, 2f, OBJECTS); if (curScore < bestScore) { bestScore = curScore; bestDir = tryDir; } for (int i = 1; i < GRANULARITY / 2; i++) { if (Clock.getBytecodeNum() > 
BYTE_THRESHOLD) continue; tryDir = initDir.rotateRightRads(i * DEG_360 / GRANULARITY); tryLoc = MY_LOCATION.add(tryDir, rc.getType().sensorRadius-2f); curScore = getDensity(tryLoc, 2f, OBJECTS); if (curScore < bestScore) { bestScore = curScore; bestDir = tryDir; } tryDir = initDir.rotateLeftRads(i * DEG_360 / GRANULARITY); tryLoc = MY_LOCATION.add(tryDir, rc.getType().sensorRadius-2f); curScore = getDensity(tryLoc, 2f, OBJECTS); if (curScore < bestScore) { bestScore = curScore; bestDir = tryDir; } } if (bestDir != null){ cooldownDir = bestDir; } else{ cooldownDir = initDir; } } target = MY_LOCATION.add(cooldownDir, 5f); if (am_stuck) { search_dir = !search_dir; am_stuck = false; staying_conut = 0; } moveToTargetExperimental(MY_LOCATION, SENSOR_GRANULARITY, search_dir, cooldown_active, SLUG); } else{ cooldownDir = null; //Look close to robot Direction initDir = MY_LOCATION.directionTo(ENEMY_ARCHON_LOCATION).opposite(); //if (SPAWNING_ARCHON_LOCATION != null) initDir = SPAWNING_ARCHON_LOCATION.directionTo(MY_LOCATION); Direction bestDir = null; float bestDist = 0f; for (int j = 0; j < 1; j++) { float curScore = 0f; Direction tryDir = null; float bestScore = 99999f; float tryDist = (rc.getType().strideRadius) / (float) Math.pow(2, j); tryDir = initDir; if (rc.canMove(tryDir, tryDist)) { curScore = gardenerSurroundingDensity(MY_LOCATION.add(tryDir, tryDist)); if (curScore < bestScore) { bestScore = curScore; bestDir = tryDir; bestDist = tryDist; if (curScore == 0) break; //Already minimum density direction :) } } for (int i = 1; i < GRANULARITY / 2; i++) { System.out.println("Each pass byte cost: "+Clock.getBytecodeNum()); //if (Clock.getBytecodeNum() > BYTE_THRESHOLD) continue; tryDir = initDir.rotateRightRads(i * DEG_360 / GRANULARITY); if (rc.canMove(tryDir, tryDist)) { curScore = gardenerSurroundingDensity(MY_LOCATION.add(tryDir, tryDist)); if (curScore < bestScore) { bestScore = curScore; bestDir = tryDir; bestDist = tryDist; if (curScore == 0) break; //Already minimum density direction :) } } tryDir = initDir.rotateLeftRads(i * DEG_360 / GRANULARITY); if (rc.canMove(tryDir, tryDist)) { curScore = gardenerSurroundingDensity(MY_LOCATION.add(tryDir, tryDist)); if (curScore < bestScore) { bestScore = curScore; bestDir = tryDir; bestDist = tryDist; if (curScore == 0) break; //Already minimum density direction :) } } } } if (bestDir != null && bestDist != 0f) { if (rc.canMove(bestDir, bestDist)) { rc.move(bestDir, bestDist); return true; } } } return false; } }
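endRound() in the file above also carries the gardener's stuck detection: every stuckRounds turns it compares the distance actually covered against movement_stuck_radius, and if the gardener has barely moved it flips am_stuck and keeps cooldown_active set for cooldownRounds more turns, during which direct pathing is disabled. A minimal standalone sketch of that check, assuming a gardener strideRadius of 1; the class, method signature, and plain x/y coordinates are illustrative stand-ins for the MapLocation-based original:

// Sketch of the stuck detector run in endRound(): sample position every STUCK_ROUNDS turns,
// compare distance moved against the stuck radius, and hold a cooldown window when stuck.
final class StuckDetectorSketch {
    static final int STUCK_ROUNDS = 11;                     // stuckRounds in the source
    static final int COOLDOWN_ROUNDS = 17;                  // cooldownRounds in the source
    static final float STUCK_RADIUS = 1f / 8f * STUCK_ROUNDS; // strideRadius/8*stuckRounds, stride assumed ~1

    float lastX, lastY;
    boolean amStuck, cooldownActive;
    int cooldownUntil;

    void endRound(int round, float x, float y) {
        if (round % STUCK_ROUNDS != 0) return;              // only sample every STUCK_ROUNDS turns
        float dx = x - lastX, dy = y - lastY;
        if (Math.sqrt(dx * dx + dy * dy) < STUCK_RADIUS) {  // barely moved since last sample: stuck
            amStuck = true;
            cooldownActive = true;
            cooldownUntil = round + COOLDOWN_ROUNDS;
        } else {
            amStuck = false;
            if (round > cooldownUntil) cooldownActive = false; // keep slug pathing until cooldown expires
        }
        lastX = x;
        lastY = y;
    }
}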
Fixed multiple archon gardener bug
src/Scarif_stable/GardenerPlayer.java
Fixed multiple archon gardener bug
<ide><path>src/Scarif_stable/GardenerPlayer.java <ide> static int settleTimer; <ide> static final int MAX_READ_SETTLE_PTS = 50; //Maximum number of settle pts read <ide> static int MAX_INIT_TREES = 2; //Max number of early game trees <add> static float MIN_SEPARATION_DIST = 23f; //If target is further than nf, start new local farm <ide> private ArrayList<Direction> current_available_slots = new ArrayList<>(); <ide> <ide> //Reads current unit composition <ide> //if (blacklist.contains(newPt) || localBlacklist.contains(newPt)){ <ide> if (localBlacklist.contains(newPt) || globalBlacklist.contains(newPt)) continue; <ide> float dist = newPt.distanceTo(MY_LOCATION); <add> if (dist > MIN_SEPARATION_DIST) continue; <ide> if (debug) rc.setIndicatorDot(newPt, 0, 255, 255); <ide> if (dist < mindist){ <ide> mindist = dist;
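The diff above is the whole "multiple archon gardener" fix: a new MIN_SEPARATION_DIST = 23f constant plus one guard in acquireSettleLocation(), so a gardener only settles on advertised farm points within 23 units of itself and otherwise starts a fresh local farm instead of trekking to another archon's grid. A minimal sketch of the post-fix selection rule, assuming battlecode.common.MapLocation; the method name, parameter list, and the single collapsed blacklist set are illustrative, not code from the commit:

// Sketch (not from the original file): the post-fix settle-point selection as a pure function.
// MapLocation.distanceTo is the real game API; everything else here is a stand-in.
static MapLocation pickSettlePoint(java.util.List<MapLocation> candidates,
                                   java.util.Set<MapLocation> blacklist,
                                   MapLocation myLocation, float minSeparationDist) {
    MapLocation best = null;
    float bestDist = Float.MAX_VALUE;
    for (MapLocation c : candidates) {
        if (blacklist.contains(c)) continue;            // local + global blacklists collapsed to one set
        float dist = c.distanceTo(myLocation);
        if (dist > minSeparationDist) continue;         // the added guard (23f in the commit)
        if (dist < bestDist) { bestDist = dist; best = c; }
    }
    return best;                                        // null => fall back to gardenerDensityMove()
}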
JavaScript
bsd-3-clause
d11fd97b3e1e092f7609b56cdc8a5f963e44a86f
0
Machyne/pal
var queryPAL = function(query, callback) { $.ajax({ type: 'POST', url: '/api/pal', data: { query: query, client: 'web' }, success: function (response) { callback(response.result.response); } }); }; $(document).ready(function () { // show speak checkbox only if browser supports tts if ('SpeechSynthesisUtterance' in window) { $("#speak").show(); $("#speak-check").attr("checked", true); } var showResult = function (result) { $('.result').html(result); $('.history-result').prepend('<li>' + result + '</li>'); if($('#speak-check').is(':checked')) { var utterance = new SpeechSynthesisUtterance(result); utterance.rate = 1.1; window.speechSynthesis.speak(utterance); } }, prompt = $('.prompt'), sendQuery = function () { var query = prompt.val(); if (query.length > 0) { queryPAL(query, showResult); $('.history-prompt').prepend('<li>' + query + '</li>'); } }; prompt.focus(); prompt.on('keypress', function (e) { // 'enter' key if (e.which == 13) { sendQuery(); } }); $('#go-btn').on('click', sendQuery); });
static/home.js
var queryPAL = function(query, callback) { $.ajax({ type: 'POST', url: '/api/pal', data: { query: query, client: 'web' }, success: function (response) { callback(response.result.response); } }); }; $(document).ready(function () { // show speak checkbox only if browser supports tts if ('SpeechSynthesisUtterance' in window) { $("#speak").show(); $("#speak-check").attr("checked", true); } var showResult = function (result) { $('.result').html(result); $('.history-result').prepend('<li>' + result + '</li>'); if($('#speak-check').is(':checked')) { var utterance = new SpeechSynthesisUtterance(result); utterance.rate = 1.3; window.speechSynthesis.speak(utterance); } }, prompt = $('.prompt'), sendQuery = function () { var query = prompt.val(); if (query.length > 0) { queryPAL(query, showResult); $('.history-prompt').prepend('<li>' + query + '</li>'); } }; prompt.focus(); prompt.on('keypress', function (e) { // 'enter' key if (e.which == 13) { sendQuery(); } }); $('#go-btn').on('click', sendQuery); });
making an executive decision for speech output to be slower
static/home.js
making an executive decision for speech output to be slower
<ide><path>static/home.js <ide> $('.history-result').prepend('<li>' + result + '</li>'); <ide> if($('#speak-check').is(':checked')) { <ide> var utterance = new SpeechSynthesisUtterance(result); <del> utterance.rate = 1.3; <add> utterance.rate = 1.1; <ide> window.speechSynthesis.speak(utterance); <ide> } <ide> },
JavaScript
bsd-3-clause
1eacf4c789f1c38ad4a72f7994a81206d1bd830f
0
Kitware/vtk-js
import macro from 'vtk.js/Sources/macro'; import vtkAnnotatedCubeActor from 'vtk.js/Sources/Rendering/Core/AnnotatedCubeActor'; import vtkAxesActor from 'vtk.js/Sources/Rendering/Core/AxesActor'; import vtkCornerAnnotation from 'vtk.js/Sources/Interaction/UI/CornerAnnotation'; import vtkInteractorStyleManipulator from 'vtk.js/Sources/Interaction/Style/InteractorStyleManipulator'; import vtkMatrixBuilder from 'vtk.js/Sources/Common/Core/MatrixBuilder'; import vtkOpenGLRenderWindow from 'vtk.js/Sources/Rendering/OpenGL/RenderWindow'; import vtkOrientationMarkerWidget from 'vtk.js/Sources/Interaction/Widgets/OrientationMarkerWidget'; import vtkRenderer from 'vtk.js/Sources/Rendering/Core/Renderer'; import vtkRenderWindow from 'vtk.js/Sources/Rendering/Core/RenderWindow'; import vtkRenderWindowInteractor from 'vtk.js/Sources/Rendering/Core/RenderWindowInteractor'; import InteractionPresets from 'vtk.js/Sources/Interaction/Style/InteractorStyleManipulator/Presets'; import AnnotatedCubePresets from 'vtk.js/Sources/Rendering/Core/AnnotatedCubeActor/Presets'; const EPSILON = 0.000001; // ---------------------------------------------------------------------------- // vtkViewProxy methods // ---------------------------------------------------------------------------- function vtkViewProxy(publicAPI, model) { // Set our className model.classHierarchy.push('vtkViewProxy'); // Private -------------------------------------------------------------------- function updateAnnotationColor() { const [r, g, b] = model.renderer.getBackground(); model.cornerAnnotation.getAnnotationContainer().style.color = r + g + b > 1.5 ? 'black' : 'white'; } // Setup -------------------------------------------------------------------- model.renderWindow = vtkRenderWindow.newInstance(); model.renderer = vtkRenderer.newInstance({ background: [0, 0, 0] }); model.renderWindow.addRenderer(model.renderer); model.openglRenderWindow = vtkOpenGLRenderWindow.newInstance(); model.renderWindow.addView(model.openglRenderWindow); model.interactor = vtkRenderWindowInteractor.newInstance(); model.interactor.setView(model.openglRenderWindow); model.interactorStyle3D = vtkInteractorStyleManipulator.newInstance(); model.interactorStyle2D = vtkInteractorStyleManipulator.newInstance(); // Apply default interaction styles InteractionPresets.applyPreset('3D', model.interactorStyle3D); InteractionPresets.applyPreset('2D', model.interactorStyle2D); model.cornerAnnotation = vtkCornerAnnotation.newInstance(); // Setup interaction model.interactor.setInteractorStyle( model.useParallelRendering ? 
model.interactorStyle2D : model.interactorStyle3D ); model.camera = model.renderer.getActiveCamera(); model.camera.setParallelProjection(!!model.useParallelRendering); // Orientation axis setup ------------------------------------------------- model.orientationAxesArrow = vtkAxesActor.newInstance(); model.orientationAxesCube = vtkAnnotatedCubeActor.newInstance(); AnnotatedCubePresets.applyPreset('default', model.orientationAxesCube); AnnotatedCubePresets.applyPreset('lps', model.orientationAxesCube); model.orientationAxesMap = { arrow: model.orientationAxesArrow, cube: model.orientationAxesCube, }; model.orientationWidget = vtkOrientationMarkerWidget.newInstance({ actor: model.orientationAxesArrow, interactor: model.renderWindow.getInteractor(), }); model.orientationWidget.setEnabled(true); model.orientationWidget.setViewportCorner( vtkOrientationMarkerWidget.Corners.BOTTOM_LEFT ); model.orientationWidget.setViewportSize(0.1); // API ---------------------------------------------------------------------- publicAPI.setPresetToInteractor3D = (nameOrDefinitions) => { if (Array.isArray(nameOrDefinitions)) { return InteractionPresets.applyDefinitions( nameOrDefinitions, model.interactorStyle3D ); } return InteractionPresets.applyPreset( nameOrDefinitions, model.interactorStyle3D ); }; // -------------------------------------------------------------------------- publicAPI.setPresetToInteractor2D = (nameOrDefinitions) => { if (Array.isArray(nameOrDefinitions)) { return InteractionPresets.applyDefinitions( nameOrDefinitions, model.interactorStyle2D ); } return InteractionPresets.applyPreset( nameOrDefinitions, model.interactorStyle2D ); }; // -------------------------------------------------------------------------- publicAPI.setOrientationAxesType = (type) => { const actor = model.orientationAxesMap[type]; if (actor) { model.orientationAxesType = type; model.orientationWidget.setActor(actor); publicAPI.renderLater(); } }; // -------------------------------------------------------------------------- publicAPI.registerOrientationAxis = (name, actor) => { model.orientationAxesMap[name] = actor; }; // -------------------------------------------------------------------------- publicAPI.unregisterOrientationAxis = (name) => { delete model.orientationAxesMap[name]; }; // -------------------------------------------------------------------------- publicAPI.listOrientationAxis = () => Object.keys(model.orientationAxesMap); // -------------------------------------------------------------------------- publicAPI.setPresetToOrientationAxes = (nameOrDefinitions) => { let changeDetected = false; if (typeof nameOrDefinitions === 'string') { if (model.presetToOrientationAxes !== nameOrDefinitions) { model.presetToOrientationAxes = nameOrDefinitions; changeDetected = AnnotatedCubePresets.applyPreset( nameOrDefinitions, model.orientationAxesCube ); publicAPI.modified(); } return changeDetected; } model.presetToOrientationAxes = 'Custom'; changeDetected = AnnotatedCubePresets.applyDefinitions( nameOrDefinitions, model.orientationAxesCube ); publicAPI.modified(); return changeDetected; }; // -------------------------------------------------------------------------- publicAPI.setContainer = (container) => { if (model.container) { model.interactor.unbindEvents(model.container); model.openglRenderWindow.setContainer(null); model.cornerAnnotation.setContainer(null); } model.container = container; if (container) { model.openglRenderWindow.setContainer(container); model.cornerAnnotation.setContainer(container); 
model.interactor.initialize(); model.interactor.bindEvents(container); } }; // -------------------------------------------------------------------------- publicAPI.resize = () => { if (model.container) { const dims = model.container.getBoundingClientRect(); if (dims.width === dims.height && dims.width === 0) { return; } const devicePixelRatio = window.devicePixelRatio || 1; const width = Math.max(10, devicePixelRatio * Math.floor(dims.width)); const height = Math.max(10, devicePixelRatio * Math.floor(dims.height)); model.openglRenderWindow.setSize(width, height); publicAPI.invokeResize({ width, height }); publicAPI.renderLater(); } }; // -------------------------------------------------------------------------- publicAPI.renderLater = () => { if (model.representations.length > 0 && model.resetCameraOnFirstRender) { model.resetCameraOnFirstRender = false; publicAPI.resetCamera(); } model.orientationWidget.updateMarkerOrientation(); model.renderer.resetCameraClippingRange(); setTimeout(model.renderWindow.render, 0); }; // -------------------------------------------------------------------------- publicAPI.addRepresentation = (representation) => { if (!representation) { return; } if (model.representations.indexOf(representation) === -1) { model.representations.push(representation); representation.getActors().forEach(model.renderer.addActor); representation.getVolumes().forEach(model.renderer.addVolume); } }; // -------------------------------------------------------------------------- publicAPI.removeRepresentation = (representation) => { if (!representation) { return; } if (model.representations.indexOf(representation) !== -1) { model.representations = model.representations.filter( (r) => r !== representation ); representation.getActors().forEach(model.renderer.removeActor); representation.getVolumes().forEach(model.renderer.removeVolume); } if (model.representations.length === 0) { model.resetCameraOnFirstRender = true; } }; // -------------------------------------------------------------------------- publicAPI.resetCamera = () => { model.renderer.resetCamera(); model.renderer.resetCameraClippingRange(); model.interactorStyle2D.setCenterOfRotation(model.camera.getFocalPoint()); model.interactorStyle3D.setCenterOfRotation(model.camera.getFocalPoint()); publicAPI.renderLater(); }; // -------------------------------------------------------------------------- publicAPI.captureImage = () => model.renderWindow.captureImages()[0]; // -------------------------------------------------------------------------- publicAPI.openCaptureImage = (target = '_blank') => { const image = new Image(); publicAPI.captureImage().then((imageURL) => { image.src = imageURL; const w = window.open('', target); w.document.write(image.outerHTML); w.document.title = 'vtk.js Image Capture'; window.focus(); }); }; // -------------------------------------------------------------------------- publicAPI.setCornerAnnotation = (corner, templateString) => { model.cornerAnnotation.updateTemplates({ [corner]: (meta) => vtkCornerAnnotation.applyTemplate(templateString, meta), }); }; // -------------------------------------------------------------------------- publicAPI.setCornerAnnotations = (annotations, useTemplateString = false) => { if (useTemplateString) { Object.keys(annotations).forEach((key) => { publicAPI.setCornerAnnotation(key, annotations[key]); }); } else { model.cornerAnnotation.updateTemplates(annotations); } }; // -------------------------------------------------------------------------- 
publicAPI.updateCornerAnnotation = (meta) => model.cornerAnnotation.updateMetadata(meta); // -------------------------------------------------------------------------- publicAPI.setAnnotationOpacity = (opacity) => { if (model.annotationOpacity !== Number(opacity)) { model.annotationOpacity = Number(opacity); model.cornerAnnotation.getAnnotationContainer().style.opacity = opacity; publicAPI.modified(); } }; // -------------------------------------------------------------------------- publicAPI.setBackground = macro.chain( model.renderer.setBackground, updateAnnotationColor ); // -------------------------------------------------------------------------- publicAPI.getBackground = model.renderer.getBackground; // -------------------------------------------------------------------------- publicAPI.setAnimation = (enable, requester = publicAPI) => { if (model.disableAnimation && enable) { return; } if (enable) { model.renderWindow.getInteractor().requestAnimation(requester); } else { const skipWarning = requester === publicAPI || `${requester}`.indexOf('ViewProxy.updateOrientation.') === 0; model.renderWindow .getInteractor() .cancelAnimation(requester, skipWarning); } }; // -------------------------------------------------------------------------- publicAPI.updateOrientation = ( axisIndex, orientation, viewUp, animateSteps = 0 ) => { if (axisIndex === undefined) { return Promise.resolve(); } const originalPosition = model.camera.getPosition(); const originalViewUp = model.camera.getViewUp(); const originalFocalPoint = model.camera.getFocalPoint(); model.axis = axisIndex; model.orientation = orientation; model.viewUp = viewUp; const position = model.camera.getFocalPoint(); position[model.axis] += model.orientation; model.camera.setPosition(...position); model.camera.setViewUp(...viewUp); model.renderer.resetCamera(); const destPosition = model.camera.getPosition(); const destViewUp = model.camera.getViewUp(); // Reset to original to prevent initial render flash model.camera.setPosition(...originalPosition); model.camera.setViewUp(...originalViewUp); const animationStack = [{ position: destPosition, viewUp: destViewUp }]; if (animateSteps) { const deltaPosition = [ (originalPosition[0] - destPosition[0]) / animateSteps, (originalPosition[1] - destPosition[1]) / animateSteps, (originalPosition[2] - destPosition[2]) / animateSteps, ]; const deltaViewUp = [ (originalViewUp[0] - destViewUp[0]) / animateSteps, (originalViewUp[1] - destViewUp[1]) / animateSteps, (originalViewUp[2] - destViewUp[2]) / animateSteps, ]; const needSteps = deltaPosition[0] || deltaPosition[1] || deltaPosition[2] || deltaViewUp[0] || deltaViewUp[1] || deltaViewUp[2]; const positionDeltaAxisCount = deltaPosition .map((i) => (Math.abs(i) < EPSILON ? 0 : 1)) .reduce((a, b) => a + b, 0); const viewUpDeltaAxisCount = deltaViewUp .map((i) => (Math.abs(i) < EPSILON ? 0 : 1)) .reduce((a, b) => a + b, 0); const rotation180Only = viewUpDeltaAxisCount === 1 && positionDeltaAxisCount === 0; if (needSteps) { if (rotation180Only) { const availableAxes = originalFocalPoint .map( (fp, i) => Math.abs(originalPosition[i] - fp) < EPSILON ? 
i : null ) .filter((i) => i !== null); const axisCorrectionIndex = availableAxes.find( (v) => Math.abs(deltaViewUp[v]) < EPSILON ); for (let i = 0; i < animateSteps; i++) { const newViewUp = [ viewUp[0] + (i + 1) * deltaViewUp[0], viewUp[1] + (i + 1) * deltaViewUp[1], viewUp[2] + (i + 1) * deltaViewUp[2], ]; newViewUp[axisCorrectionIndex] = Math.sin( (Math.PI * i) / (animateSteps - 1) ); animationStack.push({ position: destPosition, viewUp: newViewUp, }); } } else { for (let i = 0; i < animateSteps; i++) { animationStack.push({ position: [ destPosition[0] + (i + 1) * deltaPosition[0], destPosition[1] + (i + 1) * deltaPosition[1], destPosition[2] + (i + 1) * deltaPosition[2], ], viewUp: [ viewUp[0] + (i + 1) * deltaViewUp[0], viewUp[1] + (i + 1) * deltaViewUp[1], viewUp[2] + (i + 1) * deltaViewUp[2], ], }); } } } } if (animationStack.length === 1) { // update camera directly model.camera.set(animationStack.pop()); model.renderer.resetCameraClippingRange(); if (model.interactor.getLightFollowCamera()) { model.renderer.updateLightsGeometryToFollowCamera(); } return Promise.resolve(); } return new Promise((resolve, reject) => { const now = performance.now().toString(); const animationRequester = `ViewProxy.updateOrientation.${now}`; publicAPI.setAnimation(true, animationRequester); let intervalId = null; const consumeAnimationStack = () => { if (animationStack.length) { const { position: cameraPosition, viewUp: cameraViewUp, } = animationStack.pop(); model.camera.setPosition(...cameraPosition); model.camera.setViewUp(...cameraViewUp); model.renderer.resetCameraClippingRange(); if (model.interactor.getLightFollowCamera()) { model.renderer.updateLightsGeometryToFollowCamera(); } } else { clearInterval(intervalId); publicAPI.setAnimation(false, animationRequester); resolve(); } }; intervalId = setInterval(consumeAnimationStack, 1); }); }; // -------------------------------------------------------------------------- publicAPI.resetOrientation = (animateSteps = 0) => publicAPI.updateOrientation( model.axis, model.orientation, model.viewUp, animateSteps ); // -------------------------------------------------------------------------- publicAPI.rotate = (angle) => { const { viewUp, focalPoint, position } = model.camera.get( 'viewUp', 'focalPoint', 'position' ); const axis = [ focalPoint[0] - position[0], focalPoint[1] - position[1], focalPoint[2] - position[2], ]; vtkMatrixBuilder .buildFromDegree() .rotate(Number.isNaN(angle) ? 
90 : angle, axis) .apply(viewUp); model.camera.setViewUp(...viewUp); model.camera.modified(); model.orientationWidget.updateMarkerOrientation(); model.renderWindow.render(); }; // -------------------------------------------------------------------------- // Initialization from state or input // -------------------------------------------------------------------------- publicAPI.resetOrientation(); updateAnnotationColor(); } // ---------------------------------------------------------------------------- // Object factory // ---------------------------------------------------------------------------- const DEFAULT_VALUES = { representations: [], sectionName: 'view', annotationOpacity: 1, resetCameraOnFirstRender: true, presetToOrientationAxes: 'lps', orientationAxesType: 'arrow', disableAnimation: false, axis: 1, orientation: 0, viewUp: [0, 0, 1], }; // ---------------------------------------------------------------------------- function extend(publicAPI, model, initialValues = {}) { Object.assign(model, DEFAULT_VALUES, initialValues); macro.obj(publicAPI, model); macro.setGet(publicAPI, model, ['name', 'disableAnimation']); macro.get(publicAPI, model, [ 'annotationOpacity', 'camera', 'container', 'cornerAnnotation', 'interactor', 'interactorStyle2D', 'interactorStyle3D', 'openglRenderWindow', 'orientationAxesType', 'presetToOrientationAxes', 'renderer', 'renderWindow', 'representations', 'useParallelRendering', ]); macro.event(publicAPI, model, 'Resize'); // Object specific methods vtkViewProxy(publicAPI, model); // Proxy handling macro.proxy(publicAPI, model); macro.proxyPropertyMapping(publicAPI, model, { orientationAxesVisibility: { modelKey: 'orientationWidget', property: 'enabled', }, orientationAxesCorner: { modelKey: 'orientationWidget', property: 'viewportCorner', }, orientationAxesSize: { modelKey: 'orientationWidget', property: 'viewportSize', }, cameraViewUp: { modelKey: 'camera', property: 'viewUp', modified: false }, cameraPosition: { modelKey: 'camera', property: 'position', modified: false, }, cameraFocalPoint: { modelKey: 'camera', property: 'focalPoint', modified: false, }, }); } // ---------------------------------------------------------------------------- export const newInstance = macro.newInstance(extend, 'vtkViewProxy'); // ---------------------------------------------------------------------------- export default { newInstance, extend };
Sources/Proxy/Core/ViewProxy/index.js
import macro from 'vtk.js/Sources/macro'; import vtkAnnotatedCubeActor from 'vtk.js/Sources/Rendering/Core/AnnotatedCubeActor'; import vtkAxesActor from 'vtk.js/Sources/Rendering/Core/AxesActor'; import vtkCornerAnnotation from 'vtk.js/Sources/Interaction/UI/CornerAnnotation'; import vtkInteractorStyleManipulator from 'vtk.js/Sources/Interaction/Style/InteractorStyleManipulator'; import vtkMatrixBuilder from 'vtk.js/Sources/Common/Core/MatrixBuilder'; import vtkOpenGLRenderWindow from 'vtk.js/Sources/Rendering/OpenGL/RenderWindow'; import vtkOrientationMarkerWidget from 'vtk.js/Sources/Interaction/Widgets/OrientationMarkerWidget'; import vtkRenderer from 'vtk.js/Sources/Rendering/Core/Renderer'; import vtkRenderWindow from 'vtk.js/Sources/Rendering/Core/RenderWindow'; import vtkRenderWindowInteractor from 'vtk.js/Sources/Rendering/Core/RenderWindowInteractor'; import InteractionPresets from 'vtk.js/Sources/Interaction/Style/InteractorStyleManipulator/Presets'; import AnnotatedCubePresets from 'vtk.js/Sources/Rendering/Core/AnnotatedCubeActor/Presets'; const EPSILON = 0.000001; // ---------------------------------------------------------------------------- // vtkViewProxy methods // ---------------------------------------------------------------------------- function vtkViewProxy(publicAPI, model) { // Set our className model.classHierarchy.push('vtkViewProxy'); // Private -------------------------------------------------------------------- function updateAnnotationColor() { const [r, g, b] = model.renderer.getBackground(); model.cornerAnnotation.getAnnotationContainer().style.color = r + g + b > 1.5 ? 'black' : 'white'; } // Setup -------------------------------------------------------------------- model.renderWindow = vtkRenderWindow.newInstance(); model.renderer = vtkRenderer.newInstance({ background: [0, 0, 0] }); model.renderWindow.addRenderer(model.renderer); model.openglRenderWindow = vtkOpenGLRenderWindow.newInstance(); model.renderWindow.addView(model.openglRenderWindow); model.interactor = vtkRenderWindowInteractor.newInstance(); model.interactor.setView(model.openglRenderWindow); model.interactorStyle3D = vtkInteractorStyleManipulator.newInstance(); model.interactorStyle2D = vtkInteractorStyleManipulator.newInstance(); // Apply default interaction styles InteractionPresets.applyPreset('3D', model.interactorStyle3D); InteractionPresets.applyPreset('2D', model.interactorStyle2D); model.cornerAnnotation = vtkCornerAnnotation.newInstance(); // Setup interaction model.interactor.setInteractorStyle( model.useParallelRendering ? 
model.interactorStyle2D : model.interactorStyle3D ); model.camera = model.renderer.getActiveCamera(); model.camera.setParallelProjection(!!model.useParallelRendering); // Orientation axis setup ------------------------------------------------- model.orientationAxesArrow = vtkAxesActor.newInstance(); model.orientationAxesCube = vtkAnnotatedCubeActor.newInstance(); AnnotatedCubePresets.applyPreset('default', model.orientationAxesCube); AnnotatedCubePresets.applyPreset('lps', model.orientationAxesCube); model.orientationAxesList = [ { name: 'arrow', actor: model.orientationAxesArrow }, { name: 'cube', actor: model.orientationAxesCube }, ]; model.orientationWidget = vtkOrientationMarkerWidget.newInstance({ actor: model.orientationAxesArrow, interactor: model.renderWindow.getInteractor(), }); model.orientationWidget.setEnabled(true); model.orientationWidget.setViewportCorner( vtkOrientationMarkerWidget.Corners.BOTTOM_LEFT ); model.orientationWidget.setViewportSize(0.1); // API ---------------------------------------------------------------------- publicAPI.setPresetToInteractor3D = (nameOrDefinitions) => { if (Array.isArray(nameOrDefinitions)) { return InteractionPresets.applyDefinitions( nameOrDefinitions, model.interactorStyle3D ); } return InteractionPresets.applyPreset( nameOrDefinitions, model.interactorStyle3D ); }; // -------------------------------------------------------------------------- publicAPI.setPresetToInteractor2D = (nameOrDefinitions) => { if (Array.isArray(nameOrDefinitions)) { return InteractionPresets.applyDefinitions( nameOrDefinitions, model.interactorStyle2D ); } return InteractionPresets.applyPreset( nameOrDefinitions, model.interactorStyle2D ); }; // -------------------------------------------------------------------------- publicAPI.setOrientationAxesType = (type) => { const orientationAxis = model.listOrientationAxis.find( (element) => element.name === type ); if (orientationAxis !== undefined) { model.orientationAxesType = orientationAxis.name; model.orientationWidget.setActor(orientationAxis.actor); publicAPI.renderLater(); } }; // -------------------------------------------------------------------------- publicAPI.registerOrientationAxis = (name, actor) => { const orientationAxisIndex = model.listOrientationAxis.findIndex( (element) => element.name === name ); if (orientationAxisIndex === -1) { model.listOrientationAxis.push({ name, actor }); } }; // -------------------------------------------------------------------------- publicAPI.unregisterOrientationAxis = (name) => { const orientationAxisIndex = model.listOrientationAxis.findIndex( (element) => element.name === name ); if (orientationAxisIndex !== -1) { model.listOrientationAxis.splice(orientationAxisIndex, 1); } }; // -------------------------------------------------------------------------- publicAPI.setPresetToOrientationAxes = (nameOrDefinitions) => { let changeDetected = false; if (typeof nameOrDefinitions === 'string') { if (model.presetToOrientationAxes !== nameOrDefinitions) { model.presetToOrientationAxes = nameOrDefinitions; changeDetected = AnnotatedCubePresets.applyPreset( nameOrDefinitions, model.orientationAxesCube ); publicAPI.modified(); } return changeDetected; } model.presetToOrientationAxes = 'Custom'; changeDetected = AnnotatedCubePresets.applyDefinitions( nameOrDefinitions, model.orientationAxesCube ); publicAPI.modified(); return changeDetected; }; // -------------------------------------------------------------------------- publicAPI.setContainer = (container) => { if 
(model.container) { model.interactor.unbindEvents(model.container); model.openglRenderWindow.setContainer(null); model.cornerAnnotation.setContainer(null); } model.container = container; if (container) { model.openglRenderWindow.setContainer(container); model.cornerAnnotation.setContainer(container); model.interactor.initialize(); model.interactor.bindEvents(container); } }; // -------------------------------------------------------------------------- publicAPI.resize = () => { if (model.container) { const dims = model.container.getBoundingClientRect(); if (dims.width === dims.height && dims.width === 0) { return; } const devicePixelRatio = window.devicePixelRatio || 1; const width = Math.max(10, devicePixelRatio * Math.floor(dims.width)); const height = Math.max(10, devicePixelRatio * Math.floor(dims.height)); model.openglRenderWindow.setSize(width, height); publicAPI.invokeResize({ width, height }); publicAPI.renderLater(); } }; // -------------------------------------------------------------------------- publicAPI.renderLater = () => { if (model.representations.length > 0 && model.resetCameraOnFirstRender) { model.resetCameraOnFirstRender = false; publicAPI.resetCamera(); } model.orientationWidget.updateMarkerOrientation(); model.renderer.resetCameraClippingRange(); setTimeout(model.renderWindow.render, 0); }; // -------------------------------------------------------------------------- publicAPI.addRepresentation = (representation) => { if (!representation) { return; } if (model.representations.indexOf(representation) === -1) { model.representations.push(representation); representation.getActors().forEach(model.renderer.addActor); representation.getVolumes().forEach(model.renderer.addVolume); } }; // -------------------------------------------------------------------------- publicAPI.removeRepresentation = (representation) => { if (!representation) { return; } if (model.representations.indexOf(representation) !== -1) { model.representations = model.representations.filter( (r) => r !== representation ); representation.getActors().forEach(model.renderer.removeActor); representation.getVolumes().forEach(model.renderer.removeVolume); } if (model.representations.length === 0) { model.resetCameraOnFirstRender = true; } }; // -------------------------------------------------------------------------- publicAPI.resetCamera = () => { model.renderer.resetCamera(); model.renderer.resetCameraClippingRange(); model.interactorStyle2D.setCenterOfRotation(model.camera.getFocalPoint()); model.interactorStyle3D.setCenterOfRotation(model.camera.getFocalPoint()); publicAPI.renderLater(); }; // -------------------------------------------------------------------------- publicAPI.captureImage = () => model.renderWindow.captureImages()[0]; // -------------------------------------------------------------------------- publicAPI.openCaptureImage = (target = '_blank') => { const image = new Image(); publicAPI.captureImage().then((imageURL) => { image.src = imageURL; const w = window.open('', target); w.document.write(image.outerHTML); w.document.title = 'vtk.js Image Capture'; window.focus(); }); }; // -------------------------------------------------------------------------- publicAPI.setCornerAnnotation = (corner, templateString) => { model.cornerAnnotation.updateTemplates({ [corner]: (meta) => vtkCornerAnnotation.applyTemplate(templateString, meta), }); }; // -------------------------------------------------------------------------- publicAPI.setCornerAnnotations = (annotations, useTemplateString = false) => { 
if (useTemplateString) { Object.keys(annotations).forEach((key) => { publicAPI.setCornerAnnotation(key, annotations[key]); }); } else { model.cornerAnnotation.updateTemplates(annotations); } }; // -------------------------------------------------------------------------- publicAPI.updateCornerAnnotation = (meta) => model.cornerAnnotation.updateMetadata(meta); // -------------------------------------------------------------------------- publicAPI.setAnnotationOpacity = (opacity) => { if (model.annotationOpacity !== Number(opacity)) { model.annotationOpacity = Number(opacity); model.cornerAnnotation.getAnnotationContainer().style.opacity = opacity; publicAPI.modified(); } }; // -------------------------------------------------------------------------- publicAPI.setBackground = macro.chain( model.renderer.setBackground, updateAnnotationColor ); // -------------------------------------------------------------------------- publicAPI.getBackground = model.renderer.getBackground; // -------------------------------------------------------------------------- publicAPI.setAnimation = (enable, requester = publicAPI) => { if (model.disableAnimation && enable) { return; } if (enable) { model.renderWindow.getInteractor().requestAnimation(requester); } else { const skipWarning = requester === publicAPI || `${requester}`.indexOf('ViewProxy.updateOrientation.') === 0; model.renderWindow .getInteractor() .cancelAnimation(requester, skipWarning); } }; // -------------------------------------------------------------------------- publicAPI.updateOrientation = ( axisIndex, orientation, viewUp, animateSteps = 0 ) => { if (axisIndex === undefined) { return Promise.resolve(); } const originalPosition = model.camera.getPosition(); const originalViewUp = model.camera.getViewUp(); const originalFocalPoint = model.camera.getFocalPoint(); model.axis = axisIndex; model.orientation = orientation; model.viewUp = viewUp; const position = model.camera.getFocalPoint(); position[model.axis] += model.orientation; model.camera.setPosition(...position); model.camera.setViewUp(...viewUp); model.renderer.resetCamera(); const destPosition = model.camera.getPosition(); const destViewUp = model.camera.getViewUp(); // Reset to original to prevent initial render flash model.camera.setPosition(...originalPosition); model.camera.setViewUp(...originalViewUp); const animationStack = [{ position: destPosition, viewUp: destViewUp }]; if (animateSteps) { const deltaPosition = [ (originalPosition[0] - destPosition[0]) / animateSteps, (originalPosition[1] - destPosition[1]) / animateSteps, (originalPosition[2] - destPosition[2]) / animateSteps, ]; const deltaViewUp = [ (originalViewUp[0] - destViewUp[0]) / animateSteps, (originalViewUp[1] - destViewUp[1]) / animateSteps, (originalViewUp[2] - destViewUp[2]) / animateSteps, ]; const needSteps = deltaPosition[0] || deltaPosition[1] || deltaPosition[2] || deltaViewUp[0] || deltaViewUp[1] || deltaViewUp[2]; const positionDeltaAxisCount = deltaPosition .map((i) => (Math.abs(i) < EPSILON ? 0 : 1)) .reduce((a, b) => a + b, 0); const viewUpDeltaAxisCount = deltaViewUp .map((i) => (Math.abs(i) < EPSILON ? 0 : 1)) .reduce((a, b) => a + b, 0); const rotation180Only = viewUpDeltaAxisCount === 1 && positionDeltaAxisCount === 0; if (needSteps) { if (rotation180Only) { const availableAxes = originalFocalPoint .map( (fp, i) => Math.abs(originalPosition[i] - fp) < EPSILON ? 
i : null ) .filter((i) => i !== null); const axisCorrectionIndex = availableAxes.find( (v) => Math.abs(deltaViewUp[v]) < EPSILON ); for (let i = 0; i < animateSteps; i++) { const newViewUp = [ viewUp[0] + (i + 1) * deltaViewUp[0], viewUp[1] + (i + 1) * deltaViewUp[1], viewUp[2] + (i + 1) * deltaViewUp[2], ]; newViewUp[axisCorrectionIndex] = Math.sin( (Math.PI * i) / (animateSteps - 1) ); animationStack.push({ position: destPosition, viewUp: newViewUp, }); } } else { for (let i = 0; i < animateSteps; i++) { animationStack.push({ position: [ destPosition[0] + (i + 1) * deltaPosition[0], destPosition[1] + (i + 1) * deltaPosition[1], destPosition[2] + (i + 1) * deltaPosition[2], ], viewUp: [ viewUp[0] + (i + 1) * deltaViewUp[0], viewUp[1] + (i + 1) * deltaViewUp[1], viewUp[2] + (i + 1) * deltaViewUp[2], ], }); } } } } if (animationStack.length === 1) { // update camera directly model.camera.set(animationStack.pop()); model.renderer.resetCameraClippingRange(); if (model.interactor.getLightFollowCamera()) { model.renderer.updateLightsGeometryToFollowCamera(); } return Promise.resolve(); } return new Promise((resolve, reject) => { const now = performance.now().toString(); const animationRequester = `ViewProxy.updateOrientation.${now}`; publicAPI.setAnimation(true, animationRequester); let intervalId = null; const consumeAnimationStack = () => { if (animationStack.length) { const { position: cameraPosition, viewUp: cameraViewUp, } = animationStack.pop(); model.camera.setPosition(...cameraPosition); model.camera.setViewUp(...cameraViewUp); model.renderer.resetCameraClippingRange(); if (model.interactor.getLightFollowCamera()) { model.renderer.updateLightsGeometryToFollowCamera(); } } else { clearInterval(intervalId); publicAPI.setAnimation(false, animationRequester); resolve(); } }; intervalId = setInterval(consumeAnimationStack, 1); }); }; // -------------------------------------------------------------------------- publicAPI.resetOrientation = (animateSteps = 0) => publicAPI.updateOrientation( model.axis, model.orientation, model.viewUp, animateSteps ); // -------------------------------------------------------------------------- publicAPI.rotate = (angle) => { const { viewUp, focalPoint, position } = model.camera.get( 'viewUp', 'focalPoint', 'position' ); const axis = [ focalPoint[0] - position[0], focalPoint[1] - position[1], focalPoint[2] - position[2], ]; vtkMatrixBuilder .buildFromDegree() .rotate(Number.isNaN(angle) ? 
90 : angle, axis) .apply(viewUp); model.camera.setViewUp(...viewUp); model.camera.modified(); model.orientationWidget.updateMarkerOrientation(); model.renderWindow.render(); }; // -------------------------------------------------------------------------- // Initialization from state or input // -------------------------------------------------------------------------- publicAPI.resetOrientation(); updateAnnotationColor(); } // ---------------------------------------------------------------------------- // Object factory // ---------------------------------------------------------------------------- const DEFAULT_VALUES = { representations: [], sectionName: 'view', annotationOpacity: 1, resetCameraOnFirstRender: true, presetToOrientationAxes: 'lps', orientationAxesType: 'arrow', disableAnimation: false, axis: 1, orientation: 0, viewUp: [0, 0, 1], }; // ---------------------------------------------------------------------------- function extend(publicAPI, model, initialValues = {}) { Object.assign(model, DEFAULT_VALUES, initialValues); macro.obj(publicAPI, model); macro.setGet(publicAPI, model, ['name', 'disableAnimation']); macro.get(publicAPI, model, [ 'annotationOpacity', 'camera', 'container', 'cornerAnnotation', 'interactor', 'interactorStyle2D', 'interactorStyle3D', 'listOrientationAxis', 'openglRenderWindow', 'orientationAxesType', 'presetToOrientationAxes', 'renderer', 'renderWindow', 'representations', 'useParallelRendering', ]); macro.event(publicAPI, model, 'Resize'); // Object specific methods vtkViewProxy(publicAPI, model); // Proxy handling macro.proxy(publicAPI, model); macro.proxyPropertyMapping(publicAPI, model, { orientationAxesVisibility: { modelKey: 'orientationWidget', property: 'enabled', }, orientationAxesCorner: { modelKey: 'orientationWidget', property: 'viewportCorner', }, orientationAxesSize: { modelKey: 'orientationWidget', property: 'viewportSize', }, cameraViewUp: { modelKey: 'camera', property: 'viewUp', modified: false }, cameraPosition: { modelKey: 'camera', property: 'position', modified: false, }, cameraFocalPoint: { modelKey: 'camera', property: 'focalPoint', modified: false, }, }); } // ---------------------------------------------------------------------------- export const newInstance = macro.newInstance(extend, 'vtkViewProxy'); // ---------------------------------------------------------------------------- export default { newInstance, extend };
Replace the orientation axes array with a map keyed by name
Sources/Proxy/Core/ViewProxy/index.js
Replace the orientation axes array with a map keyed by name
<ide><path>ources/Proxy/Core/ViewProxy/index.js <ide> AnnotatedCubePresets.applyPreset('default', model.orientationAxesCube); <ide> AnnotatedCubePresets.applyPreset('lps', model.orientationAxesCube); <ide> <del> model.orientationAxesList = [ <del> { name: 'arrow', actor: model.orientationAxesArrow }, <del> { name: 'cube', actor: model.orientationAxesCube }, <del> ]; <add> model.orientationAxesMap = { <add> arrow: model.orientationAxesArrow, <add> cube: model.orientationAxesCube, <add> }; <ide> model.orientationWidget = vtkOrientationMarkerWidget.newInstance({ <ide> actor: model.orientationAxesArrow, <ide> interactor: model.renderWindow.getInteractor(), <ide> // -------------------------------------------------------------------------- <ide> <ide> publicAPI.setOrientationAxesType = (type) => { <del> const orientationAxis = model.listOrientationAxis.find( <del> (element) => element.name === type <del> ); <del> if (orientationAxis !== undefined) { <del> model.orientationAxesType = orientationAxis.name; <del> model.orientationWidget.setActor(orientationAxis.actor); <add> const actor = model.orientationAxesMap[type]; <add> if (actor) { <add> model.orientationAxesType = type; <add> model.orientationWidget.setActor(actor); <ide> publicAPI.renderLater(); <ide> } <ide> }; <ide> // -------------------------------------------------------------------------- <ide> <ide> publicAPI.registerOrientationAxis = (name, actor) => { <del> const orientationAxisIndex = model.listOrientationAxis.findIndex( <del> (element) => element.name === name <del> ); <del> if (orientationAxisIndex === -1) { <del> model.listOrientationAxis.push({ name, actor }); <del> } <add> model.orientationAxesMap[name] = actor; <ide> }; <ide> <ide> // -------------------------------------------------------------------------- <ide> <ide> publicAPI.unregisterOrientationAxis = (name) => { <del> const orientationAxisIndex = model.listOrientationAxis.findIndex( <del> (element) => element.name === name <del> ); <del> if (orientationAxisIndex !== -1) { <del> model.listOrientationAxis.splice(orientationAxisIndex, 1); <del> } <del> }; <add> delete model.orientationAxesMap[name]; <add> }; <add> <add> // -------------------------------------------------------------------------- <add> <add> publicAPI.listOrientationAxis = () => Object.keys(model.orientationAxesMap); <ide> <ide> // -------------------------------------------------------------------------- <ide> <ide> 'interactor', <ide> 'interactorStyle2D', <ide> 'interactorStyle3D', <del> 'listOrientationAxis', <ide> 'openglRenderWindow', <ide> 'orientationAxesType', <ide> 'presetToOrientationAxes',
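To make the effect of this change concrete, here is a minimal usage sketch of the map-backed orientation-axes API that the diff introduces. Only the methods visible in the diff (registerOrientationAxis, listOrientationAxis, setOrientationAxesType, unregisterOrientationAxis) come from the change itself; the import paths, the newInstance() construction and the custom actor are illustrative assumptions, not part of the commit.

// Sketch of the new map-backed orientation-axes API (assumed setup, not from the repo)
import vtkViewProxy from 'vtk.js/Sources/Proxy/Core/ViewProxy';
import vtkAxesActor from 'vtk.js/Sources/Rendering/Core/AxesActor';

const view = vtkViewProxy.newInstance();              // assumed: default construction
const customAxes = vtkAxesActor.newInstance();        // any orientation marker actor

view.registerOrientationAxis('custom', customAxes);   // orientationAxesMap['custom'] = actor
console.log(view.listOrientationAxis());              // ['arrow', 'cube', 'custom']
view.setOrientationAxesType('custom');                // plain map lookup instead of Array.find
view.unregisterOrientationAxis('custom');             // delete orientationAxesMap['custom']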
Java
mit
3b8c66135a1462278446f0f183f8c4f996a54f95
0
bmustiata/ftrace-injector
package com.ciplogic.jsinjector;// Generated from JavaScript.g4 by ANTLR 4.1 import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.TokenStreamRewriter; import org.antlr.v4.runtime.misc.NotNull; public class InjectCiplogicTraceWrap extends JavaScriptBaseListener { private final TokenStreamRewriter tokenStreamRewriter; private final String sourceFile; public InjectCiplogicTraceWrap(String sourceFile, TokenStream tokenStream) { this.sourceFile = sourceFile; this.tokenStreamRewriter = new TokenStreamRewriter(tokenStream); } @Override public void enterAnonymousFunction(@NotNull JavaScriptParser.AnonymousFunctionContext ctx) { wrapFunction("<anonymous>", ctx.unnamedFunction()); } @Override public void enterAssignedFunction(@NotNull JavaScriptParser.AssignedFunctionContext ctx) { String name = ctx.getChild(0).getText(); JavaScriptParser.UnnamedFunctionContext unnamedFunctionContext = (JavaScriptParser.UnnamedFunctionContext) ctx.getChild(2); wrapFunction(name, unnamedFunctionContext); } @Override public void enterObjectFunction(@NotNull JavaScriptParser.ObjectFunctionContext ctx) { String name = null; if (ctx.string() != null) { String text = ctx.string().getText(); name = text.substring(1, text.length() - 1); } else if (ctx.identifier() != null) { name = ctx.identifier().getText(); } JavaScriptParser.UnnamedFunctionContext unnamedFunctionContext = (JavaScriptParser.UnnamedFunctionContext) ctx.getChild(2); wrapFunction(name, unnamedFunctionContext); } @Override public void enterNamedFunction(@NotNull JavaScriptParser.NamedFunctionContext ctx) { String name = ctx.getChild(1).getText(); String prefixWrap = String.format("function %s() { return ftrace.wrap('%s', '" + getLocation(ctx) + "', function() {", name, name); String suffixWrap = String.format(" return __ciplogic_%s.apply(this,arguments); }).apply(this,arguments); }", name); tokenStreamRewriter.insertBefore(ctx.identifier().start, "__ciplogic_"); tokenStreamRewriter.insertBefore(ctx.start, prefixWrap); tokenStreamRewriter.insertAfter(ctx.stop, suffixWrap); } private void wrapFunction(String name, JavaScriptParser.UnnamedFunctionContext unnamedFunctionContext) { if (unnamedFunctionContext.unnamedSimpleFunction() != null) { wrapSimpleFunction(name, unnamedFunctionContext); } else if (unnamedFunctionContext.unnamedContextFunction() != null) { wrapFunctionCall(name, unnamedFunctionContext); } } private void wrapSimpleFunction(String name, JavaScriptParser.UnnamedFunctionContext unnamedFunctionContext) { String prefixWrap = String.format("ftrace.wrap('%s','" + getLocation(unnamedFunctionContext) + "',", name); String suffixWrap = ")"; tokenStreamRewriter.insertBefore(unnamedFunctionContext.start, prefixWrap); tokenStreamRewriter.insertAfter(unnamedFunctionContext.stop, suffixWrap); } private void wrapFunctionCall(String name, JavaScriptParser.UnnamedFunctionContext unnamedFunctionContext) { String prefixWrap = String.format("ftrace.wrap('%s','" + getLocation(unnamedFunctionContext) + "',", name); JavaScriptParser.InvocationExpressionsContext invocationsExpression = unnamedFunctionContext.unnamedContextFunction().invocationExpressions(); String suffixWrap = ")(" + invocationsExpression.expressions().getText() + ")"; tokenStreamRewriter.delete( invocationsExpression.start, invocationsExpression.stop ); tokenStreamRewriter.insertBefore(unnamedFunctionContext.start, prefixWrap); tokenStreamRewriter.insertAfter(unnamedFunctionContext.stop, suffixWrap); } public String getFinalSource() { 
return tokenStreamRewriter.getText(); } private String getLocation(ParserRuleContext ctx) { return String.format("%s:%s", sourceFile, ctx.getStart().getLine()); } }
src/main/java/com/ciplogic/jsinjector/InjectCiplogicTraceWrap.java
package com.ciplogic.jsinjector;// Generated from JavaScript.g4 by ANTLR 4.1 import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.TokenStreamRewriter; import org.antlr.v4.runtime.misc.NotNull; public class InjectCiplogicTraceWrap extends JavaScriptBaseListener { private final TokenStreamRewriter tokenStreamRewriter; private final String sourceFile; public InjectCiplogicTraceWrap(String sourceFile, TokenStream tokenStream) { this.sourceFile = sourceFile; this.tokenStreamRewriter = new TokenStreamRewriter(tokenStream); } @Override public void enterAnonymousFunction(@NotNull JavaScriptParser.AnonymousFunctionContext ctx) { wrapFunction("<anonymous>", ctx.unnamedFunction()); } @Override public void enterAssignedFunction(@NotNull JavaScriptParser.AssignedFunctionContext ctx) { String name = ctx.getChild(0).getText(); JavaScriptParser.UnnamedFunctionContext unnamedFunctionContext = (JavaScriptParser.UnnamedFunctionContext) ctx.getChild(2); wrapFunction(name, unnamedFunctionContext); } @Override public void enterObjectFunction(@NotNull JavaScriptParser.ObjectFunctionContext ctx) { String name = null; if (ctx.string() != null) { String text = ctx.string().getText(); name = text.substring(1, text.length() - 1); } else if (ctx.identifier() != null) { name = ctx.identifier().getText(); } JavaScriptParser.UnnamedFunctionContext unnamedFunctionContext = (JavaScriptParser.UnnamedFunctionContext) ctx.getChild(2); wrapFunction(name, unnamedFunctionContext); } @Override public void enterNamedFunction(@NotNull JavaScriptParser.NamedFunctionContext ctx) { String name = ctx.getChild(1).getText(); String prefixWrap = String.format("function %s() { return ftrace.wrap('%s', '" + getLocation(ctx) + "', function() {", name, name); String suffixWrap = String.format(" return %s.apply(this,arguments); }).apply(this,arguments); }", name); tokenStreamRewriter.insertBefore(ctx.start, prefixWrap); tokenStreamRewriter.insertAfter(ctx.stop, suffixWrap); } private void wrapFunction(String name, JavaScriptParser.UnnamedFunctionContext unnamedFunctionContext) { if (unnamedFunctionContext.unnamedSimpleFunction() != null) { wrapSimpleFunction(name, unnamedFunctionContext); } else if (unnamedFunctionContext.unnamedContextFunction() != null) { wrapFunctionCall(name, unnamedFunctionContext); } } private void wrapSimpleFunction(String name, JavaScriptParser.UnnamedFunctionContext unnamedFunctionContext) { String prefixWrap = String.format("ftrace.wrap('%s','" + getLocation(unnamedFunctionContext) + "',", name); String suffixWrap = ")"; tokenStreamRewriter.insertBefore(unnamedFunctionContext.start, prefixWrap); tokenStreamRewriter.insertAfter(unnamedFunctionContext.stop, suffixWrap); } private void wrapFunctionCall(String name, JavaScriptParser.UnnamedFunctionContext unnamedFunctionContext) { String prefixWrap = String.format("ftrace.wrap('%s','" + getLocation(unnamedFunctionContext) + "',", name); JavaScriptParser.InvocationExpressionsContext invocationsExpression = unnamedFunctionContext.unnamedContextFunction().invocationExpressions(); String suffixWrap = ")(" + invocationsExpression.expressions().getText() + ")"; tokenStreamRewriter.delete( invocationsExpression.start, invocationsExpression.stop ); tokenStreamRewriter.insertBefore(unnamedFunctionContext.start, prefixWrap); tokenStreamRewriter.insertAfter(unnamedFunctionContext.stop, suffixWrap); } public String getFinalSource() { return tokenStreamRewriter.getText(); } private String getLocation(ParserRuleContext 
ctx) { return String.format("%s:%s", sourceFile, ctx.getStart().getLine()); } }
Escape function names so that callbacks set from inside the function body are not bound to the supposedly wrapped function.
src/main/java/com/ciplogic/jsinjector/InjectCiplogicTraceWrap.java
Escape function names so that callbacks set from inside the function body are not bound to the supposedly wrapped function.
<ide><path>rc/main/java/com/ciplogic/jsinjector/InjectCiplogicTraceWrap.java <ide> String name = ctx.getChild(1).getText(); <ide> <ide> String prefixWrap = String.format("function %s() { return ftrace.wrap('%s', '" + getLocation(ctx) + "', function() {", name, name); <del> String suffixWrap = String.format(" return %s.apply(this,arguments); }).apply(this,arguments); }", name); <add> String suffixWrap = String.format(" return __ciplogic_%s.apply(this,arguments); }).apply(this,arguments); }", name); <add> <add> tokenStreamRewriter.insertBefore(ctx.identifier().start, "__ciplogic_"); <ide> <ide> tokenStreamRewriter.insertBefore(ctx.start, prefixWrap); <ide> tokenStreamRewriter.insertAfter(ctx.stop, suffixWrap);
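For context on what the __ciplogic_ prefix buys, here is an approximate before/after of the rewriter's output for a small named function, following the prefixWrap/suffixWrap templates in enterNamedFunction above. The source file name, line number and function are invented, whitespace is added for readability, and actually running the output assumes the ftrace runtime is loaded.

// Hypothetical input (example.js, line 1):
function greet(msg) {
  setTimeout(greet, 1000);   // a callback taken from inside the function body
  console.log(msg);
}

// Approximate rewriter output after this commit (reformatted for readability):
function greet() {
  return ftrace.wrap('greet', 'example.js:1', function() {
    function __ciplogic_greet(msg) {
      setTimeout(greet, 1000);   // `greet` now resolves to the outer traced wrapper
      console.log(msg);
    }
    return __ciplogic_greet.apply(this, arguments);
  }).apply(this, arguments);
}
// Before this commit the inner declaration kept the name `greet`, shadowing the
// wrapper, so the callback above would have been bound to the un-traced inner copy.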
Java
apache-2.0
bf33495cdaa9f5eed7d24e6c96a3e6fd0d8a5eb6
0
ne0fhyk/DP-Wear,DroidPlanner/tower-wear
package com.ox3dr.services.android.lib.drone.property; import android.os.Parcel; import android.os.Parcelable; /** * Created by fhuya on 10/28/14. */ public enum VehicleMode implements Parcelable { PLANE_MANUAL(0, Type.TYPE_PLANE, "Manual"), PLANE_CIRCLE(1, Type.TYPE_PLANE, "Circle"), PLANE_STABILIZE(2, Type.TYPE_PLANE, "Stabilize"), PLANE_TRAINING(3, Type.TYPE_PLANE, "Training"), PLANE_ACRO(4, Type.TYPE_PLANE, "Acro"), PLANE_FLY_BY_WIRE_A(5, Type.TYPE_PLANE, "FBW A"), PLANE_FLY_BY_WIRE_B(6, Type.TYPE_PLANE, "FBW B"), PLANE_CRUISE(7, Type.TYPE_PLANE, "Cruise"), PLANE_AUTOTUNE(8, Type.TYPE_PLANE, "Autotune"), PLANE_AUTO(10, Type.TYPE_PLANE, "Auto"), PLANE_RTL(11, Type.TYPE_PLANE, "RTL"), PLANE_LOITER(12, Type.TYPE_PLANE, "Loiter"), PLANE_GUIDED(15, Type.TYPE_PLANE, "Guided"), COPTER_STABILIZE(0, Type.TYPE_COPTER, "Stabilize"), COPTER_ACRO(1, Type.TYPE_COPTER, "Acro"), COPTER_ALT_HOLD(2, Type.TYPE_COPTER, "Alt Hold"), COPTER_AUTO(3, Type.TYPE_COPTER, "Auto"), COPTER_GUIDED(4, Type.TYPE_COPTER, "Guided"), COPTER_LOITER(5, Type.TYPE_COPTER, "Loiter"), COPTER_RTL(6, Type.TYPE_COPTER, "RTL"), COPTER_CIRCLE(7, Type.TYPE_COPTER, "Circle"), COPTER_LAND(9, Type.TYPE_COPTER, "Land"), COPTER_DRIFT(11, Type.TYPE_COPTER, "Drift"), COPTER_SPORT(13, Type.TYPE_COPTER, "Sport"), COPTER_FLIP(14, Type.TYPE_COPTER, "Flip"), COPTER_AUTOTUNE(15, Type.TYPE_COPTER, "Autotune"), COPTER_POSHOLD(16, Type.TYPE_COPTER, "PosHold"), ROVER_MANUAL(0, Type.TYPE_ROVER, "Manual"), ROVER_LEARNING(2, Type.TYPE_ROVER, "Learning"), ROVER_STEERING(3, Type.TYPE_ROVER, "Steering"), ROVER_HOLD(4, Type.TYPE_ROVER, "Hold"), ROVER_AUTO(10, Type.TYPE_ROVER, "Auto"), ROVER_RTL(11, Type.TYPE_ROVER, "RTL"), ROVER_GUIDED(15, Type.TYPE_ROVER, "Guided"), ROVER_INITIALIZING(16, Type.TYPE_ROVER, "Initializing"); private final int mode; private final int droneType; private final String label; VehicleMode(int mode, int droneType, String label){ this.mode = mode; this.droneType = droneType; this.label = label; } public int getMode() { return mode; } public int getDroneType() { return droneType; } public String getLabel() { return label; } @Override public int describeContents(){ return 0; } @Override public void writeToParcel(final Parcel dest, final int flags){ dest.writeString(name()); } public static final Creator<VehicleMode> CREATOR = new Creator<VehicleMode>() { @Override public VehicleMode createFromParcel(Parcel source) { return VehicleMode.valueOf(source.readString()); } @Override public VehicleMode[] newArray(int size) { return new VehicleMode[size]; } }; }
lib/src/com/ox3dr/services/android/lib/drone/property/VehicleMode.java
package com.ox3dr.services.android.lib.drone.property; import android.os.Parcel; import android.os.Parcelable; /** * Created by fhuya on 10/28/14. */ public class VehicleMode implements Parcelable { private final int mode; private final int droneType; private final String label; public VehicleMode(int mode, int droneType, String label){ this.mode = mode; this.droneType = droneType; this.label = label; } public int getMode() { return mode; } public int getDroneType() { return droneType; } public String getLabel() { return label; } @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel dest, int flags) { dest.writeInt(this.mode); dest.writeInt(this.droneType); dest.writeString(this.label); } private VehicleMode(Parcel in) { this.mode = in.readInt(); this.droneType = in.readInt(); this.label = in.readString(); } public static final Parcelable.Creator<VehicleMode> CREATOR = new Parcelable.Creator<VehicleMode>() { public VehicleMode createFromParcel(Parcel source) { return new VehicleMode(source); } public VehicleMode[] newArray(int size) { return new VehicleMode[size]; } }; }
Updated the structure of the VehicleMode property: converted it from a Parcelable class to a Parcelable enum.
lib/src/com/ox3dr/services/android/lib/drone/property/VehicleMode.java
Updated the structure of the VehicleMode property: converted it from a Parcelable class to a Parcelable enum.
<ide><path>ib/src/com/ox3dr/services/android/lib/drone/property/VehicleMode.java <ide> /** <ide> * Created by fhuya on 10/28/14. <ide> */ <del>public class VehicleMode implements Parcelable { <add>public enum VehicleMode implements Parcelable { <add> <add> PLANE_MANUAL(0, Type.TYPE_PLANE, "Manual"), <add> PLANE_CIRCLE(1, Type.TYPE_PLANE, "Circle"), <add> PLANE_STABILIZE(2, Type.TYPE_PLANE, "Stabilize"), <add> PLANE_TRAINING(3, Type.TYPE_PLANE, "Training"), <add> PLANE_ACRO(4, Type.TYPE_PLANE, "Acro"), <add> PLANE_FLY_BY_WIRE_A(5, Type.TYPE_PLANE, "FBW A"), <add> PLANE_FLY_BY_WIRE_B(6, Type.TYPE_PLANE, "FBW B"), <add> PLANE_CRUISE(7, Type.TYPE_PLANE, "Cruise"), <add> PLANE_AUTOTUNE(8, Type.TYPE_PLANE, "Autotune"), <add> PLANE_AUTO(10, Type.TYPE_PLANE, "Auto"), <add> PLANE_RTL(11, Type.TYPE_PLANE, "RTL"), <add> PLANE_LOITER(12, Type.TYPE_PLANE, "Loiter"), <add> PLANE_GUIDED(15, Type.TYPE_PLANE, "Guided"), <add> <add> COPTER_STABILIZE(0, Type.TYPE_COPTER, "Stabilize"), <add> COPTER_ACRO(1, Type.TYPE_COPTER, "Acro"), <add> COPTER_ALT_HOLD(2, Type.TYPE_COPTER, "Alt Hold"), <add> COPTER_AUTO(3, Type.TYPE_COPTER, "Auto"), <add> COPTER_GUIDED(4, Type.TYPE_COPTER, "Guided"), <add> COPTER_LOITER(5, Type.TYPE_COPTER, "Loiter"), <add> COPTER_RTL(6, Type.TYPE_COPTER, "RTL"), <add> COPTER_CIRCLE(7, Type.TYPE_COPTER, "Circle"), <add> COPTER_LAND(9, Type.TYPE_COPTER, "Land"), <add> COPTER_DRIFT(11, Type.TYPE_COPTER, "Drift"), <add> COPTER_SPORT(13, Type.TYPE_COPTER, "Sport"), <add> COPTER_FLIP(14, Type.TYPE_COPTER, "Flip"), <add> COPTER_AUTOTUNE(15, Type.TYPE_COPTER, "Autotune"), <add> COPTER_POSHOLD(16, Type.TYPE_COPTER, "PosHold"), <add> <add> ROVER_MANUAL(0, Type.TYPE_ROVER, "Manual"), <add> ROVER_LEARNING(2, Type.TYPE_ROVER, "Learning"), <add> ROVER_STEERING(3, Type.TYPE_ROVER, "Steering"), <add> ROVER_HOLD(4, Type.TYPE_ROVER, "Hold"), <add> ROVER_AUTO(10, Type.TYPE_ROVER, "Auto"), <add> ROVER_RTL(11, Type.TYPE_ROVER, "RTL"), <add> ROVER_GUIDED(15, Type.TYPE_ROVER, "Guided"), <add> ROVER_INITIALIZING(16, Type.TYPE_ROVER, "Initializing"); <add> <ide> <ide> private final int mode; <ide> private final int droneType; <ide> private final String label; <ide> <del> public VehicleMode(int mode, int droneType, String label){ <add> VehicleMode(int mode, int droneType, String label){ <ide> this.mode = mode; <ide> this.droneType = droneType; <ide> this.label = label; <ide> } <ide> <ide> @Override <del> public int describeContents() { <add> public int describeContents(){ <ide> return 0; <ide> } <ide> <ide> @Override <del> public void writeToParcel(Parcel dest, int flags) { <del> dest.writeInt(this.mode); <del> dest.writeInt(this.droneType); <del> dest.writeString(this.label); <add> public void writeToParcel(final Parcel dest, final int flags){ <add> dest.writeString(name()); <ide> } <ide> <del> private VehicleMode(Parcel in) { <del> this.mode = in.readInt(); <del> this.droneType = in.readInt(); <del> this.label = in.readString(); <del> } <del> <del> public static final Parcelable.Creator<VehicleMode> CREATOR = new Parcelable.Creator<VehicleMode>() { <add> public static final Creator<VehicleMode> CREATOR = new Creator<VehicleMode>() { <add> @Override <ide> public VehicleMode createFromParcel(Parcel source) { <del> return new VehicleMode(source); <add> return VehicleMode.valueOf(source.readString()); <ide> } <ide> <add> @Override <ide> public VehicleMode[] newArray(int size) { <ide> return new VehicleMode[size]; <ide> } <ide> }; <ide> } <add>
JavaScript
mit
33cd32c6b315388f93a7a5b21c741f28a12c1405
0
spraakbanken/korp-frontend,spraakbanken/korp-frontend,spraakbanken/korp-frontend
/** @format */ import statemachine from "@/statemachine" import { setDefaultConfigValues } from "./settings" const korpFailImg = require("../img/korp_fail.svg") const deparam = require("jquery-deparam") import jStorage from "../lib/jstorage" window.authenticationProxy = new model.AuthenticationProxy() window.timeProxy = new model.TimeProxy() const creds = jStorage.get("creds") if (creds) { authenticationProxy.loginObj = creds statemachine.send("USER_FOUND", creds) } else { statemachine.send("USER_NOT_FOUND") } const loc_dfd = window.initLocales() $(document).keyup(function (event) { if (event.keyCode === 27) { if (kwicResults) { kwicResults.abort() } if ("lemgramResults" in window) { lemgramResults.abort() } if (statsResults) { statsResults.abort() } } }) const corpusSettingsPromise = new Promise((resolve, reject) => { const createSplashScreen = () => { const korpLogo = require("../img/korplogo_block.svg") const splash = document.getElementById("preload") splash.innerHTML = `<img class="splash" height="300" width="300" src="${korpLogo}" />` } const createErrorScreen = () => { const korpFail = require("../img/korp_fail.svg") const elem = document.getElementById("preload") elem.innerHTML = ` <div class="absolute top-1/3 text-center"> <img class="block" height="300" width="300" src="${korpFail}" /> Sorry, Korp doesn't seem to work right now </div> ` } createSplashScreen() const labParam = window.isLab ? "&include_lab" : "" fetch(`${settings["korp_backend_url"]}/corpus_config?mode=${window.currentMode}${labParam}`) .then((response) => { if (!response.ok) { console.error("Something wrong with corpus config", response.statusText) createErrorScreen() } response.json().then((modeSettings) => { window.currentModeParallel = modeSettings.parallel // only if the current mode is parallel, we load the special code required if (window.currentModeParallel) { require("./parallel/corpus_listing.js") require("./parallel/kwic_results.js") require("./parallel/stats_proxy.js") } function rename(obj, from, to) { if (obj[from]) { obj[to] = obj[from] delete obj[from] } } rename(modeSettings["attributes"], "pos_attributes", "attributes") // take the backend configuration format for attributes and expand it // TODO the internal representation should be changed to a new, more compact one. 
for (const corpusId in modeSettings["corpora"]) { const corpus = modeSettings["corpora"][corpusId] rename(corpus, "pos_attributes", "attributes") for (const attrType of ["attributes", "struct_attributes", "custom_attributes"]) { const attrList = corpus[attrType] const attrs = {} for (const attrIdx in attrList) { const attr = modeSettings["attributes"][attrType][attrList[attrIdx]] attrs[attr.name] = attr } // attrs is an object of attribute settings corpus[attrType] = attrs // attrList is an ordered list of the preferred order of attributes corpus[`_${attrType}_order`] = attrList } // TODO use the new format instead // remake the new format of witihns and contex to the old const sortingArr = ["sentence", "paragraph", "text", "1 sentence", "1 paragraph", "1 text"] function contextWithinFix(list) { // sort the list so that sentence is before paragraph list.sort((a, b) => sortingArr.indexOf(a.value) - sortingArr.indexOf(b.value)) const res = {} for (const elem of list) { res[elem.value] = elem.value } return res } corpus["within"] = contextWithinFix(corpus["within"]) corpus["context"] = contextWithinFix(corpus["context"]) } delete modeSettings["attributes"] if (!modeSettings["folders"]) { modeSettings["folders"] = {} } resolve(modeSettings) document.getElementById("preload").remove() }) }) .catch(() => createErrorScreen()) }) Promise.all([loc_dfd, corpusSettingsPromise]).then(([locData, modeSettings]) => { _.assign(window.settings, modeSettings) setDefaultConfigValues() const corpora = settings.corpora if (!window.currentModeParallel) { settings.corpusListing = new CorpusListing(corpora) } else { settings.corpusListing = new ParallelCorpusListing(corpora) } // rewriting old language codes to new ones if (location.hash.includes("lang=")) { const match = /lang\=(.*?)(&|$)/.exec(location.hash) if (match) { const lang = match[1] if (settings.isoLanguages[lang]) { location.hash = location.hash.replace(`lang=${lang}`, `lang=${settings.isoLanguages[lang]}`) } } } try { angular.bootstrap(document, ["korpApp"]) } catch (error) { c.error(error) } try { const corpus = locationSearch()["corpus"] if (corpus) { settings.corpusListing.select(corpus.split(",")) } view.updateSearchHistory() } catch (error1) { c.error("ERROR setting corpora from location", error1) } if (isLab) { $("body").addClass("lab") } $("body").addClass(`mode-${window.currentMode}`) util.browserWarn() $("#search_history").change(function (event) { const target = $(this).find(":selected") if (_.includes(["http://", "https:/"], target.val().slice(0, 7))) { location.href = target.val() } else if (target.is(".clear")) { jStorage.set("searches", []) view.updateSearchHistory() } }) let prevFragment = {} // Note that this is _not_ window.onhashchange (lowercase only) and is not called by the browser window.onHashChange = function (event, isInit) { const hasChanged = (key) => prevFragment[key] !== locationSearch()[key] if (hasChanged("lang")) { const newLang = locationSearch().lang || settings["default_language"] $("body").scope().lang = newLang window.lang = newLang util.localize() $("#languages").radioList("select", newLang) } if (isInit) { util.localize() } prevFragment = _.extend({}, locationSearch()) } $("#languages").radioList({ change() { const currentLang = $(this).radioList("getSelected").data("mode") locationSearch({ lang: currentLang !== settings["default_language"] ? currentLang : null, }) }, // TODO: this does nothing? 
selected: settings["default_language"], }) setTimeout(() => window.onHashChange(null, true), 0) $("#main").animate({ opacity: 1 }, function () { $(this).css("opacity", "") }) })
app/scripts/main.js
/** @format */ import statemachine from "@/statemachine" import { setDefaultConfigValues } from "./settings" const korpFailImg = require("../img/korp_fail.svg") const deparam = require("jquery-deparam") import jStorage from "../lib/jstorage" window.authenticationProxy = new model.AuthenticationProxy() window.timeProxy = new model.TimeProxy() const creds = jStorage.get("creds") if (creds) { authenticationProxy.loginObj = creds statemachine.send("USER_FOUND", creds) } else { statemachine.send("USER_NOT_FOUND") } // rewriting old url format to the angular one if (location.hash.length && location.hash[1] !== "?") { location.hash = `#?${_.trimStart(location.hash, "#")}` } $.ajaxSetup({ dataType: "json", traditional: true, }) $.ajaxPrefilter("json", function (options) { if (options.crossDomain && !$.support.cors) { return "jsonp" } }) const loc_dfd = window.initLocales() $(document).keyup(function (event) { if (event.keyCode === 27) { if (kwicResults) { kwicResults.abort() } if ("lemgramResults" in window) { lemgramResults.abort() } if (statsResults) { statsResults.abort() } } }) const corpusSettingsPromise = new Promise((resolve, reject) => { const createSplashScreen = () => { const korpLogo = require("../img/korplogo_block.svg") const splash = document.getElementById("preload") splash.innerHTML = `<img class="splash" height="300" width="300" src="${korpLogo}" />` } const createErrorScreen = () => { const korpFail = require("../img/korp_fail.svg") const elem = document.getElementById("preload") elem.innerHTML = ` <div class="absolute top-1/3 text-center"> <img class="block" height="300" width="300" src="${korpFail}" /> Sorry, Korp doesn't seem to work right now </div> ` } createSplashScreen() const labParam = window.isLab ? "&include_lab" : "" fetch(`${settings["korp_backend_url"]}/corpus_config?mode=${window.currentMode}${labParam}`) .then((response) => { if (!response.ok) { console.error("Something wrong with corpus config", response.statusText) createErrorScreen() } response.json().then((modeSettings) => { window.currentModeParallel = modeSettings.parallel // only if the current mode is parallel, we load the special code required if (window.currentModeParallel) { require("./parallel/corpus_listing.js") require("./parallel/kwic_results.js") require("./parallel/stats_proxy.js") } function rename(obj, from, to) { if (obj[from]) { obj[to] = obj[from] delete obj[from] } } rename(modeSettings["attributes"], "pos_attributes", "attributes") // take the backend configuration format for attributes and expand it // TODO the internal representation should be changed to a new, more compact one. 
for (const corpusId in modeSettings["corpora"]) { const corpus = modeSettings["corpora"][corpusId] rename(corpus, "pos_attributes", "attributes") for (const attrType of ["attributes", "struct_attributes", "custom_attributes"]) { const attrList = corpus[attrType] const attrs = {} for (const attrIdx in attrList) { const attr = modeSettings["attributes"][attrType][attrList[attrIdx]] attrs[attr.name] = attr } // attrs is an object of attribute settings corpus[attrType] = attrs // attrList is an ordered list of the preferred order of attributes corpus[`_${attrType}_order`] = attrList } // TODO use the new format instead // remake the new format of witihns and contex to the old const sortingArr = ["sentence", "paragraph", "text", "1 sentence", "1 paragraph", "1 text"] function contextWithinFix(list) { // sort the list so that sentence is before paragraph list.sort((a, b) => sortingArr.indexOf(a.value) - sortingArr.indexOf(b.value)) const res = {} for (const elem of list) { res[elem.value] = elem.value } return res } corpus["within"] = contextWithinFix(corpus["within"]) corpus["context"] = contextWithinFix(corpus["context"]) } delete modeSettings["attributes"] if (!modeSettings["folders"]) { modeSettings["folders"] = {} } resolve(modeSettings) document.getElementById("preload").remove() }) }) .catch(() => createErrorScreen()) }) Promise.all([loc_dfd, corpusSettingsPromise]).then(([locData, modeSettings]) => { _.assign(window.settings, modeSettings) setDefaultConfigValues() const corpora = settings.corpora if (!window.currentModeParallel) { settings.corpusListing = new CorpusListing(corpora) } else { settings.corpusListing = new ParallelCorpusListing(corpora) } // rewriting old language codes to new ones if (location.hash.includes("lang=")) { const match = /lang\=(.*?)(&|$)/.exec(location.hash) if (match) { const lang = match[1] if (settings.isoLanguages[lang]) { location.hash = location.hash.replace(`lang=${lang}`, `lang=${settings.isoLanguages[lang]}`) } } } try { angular.bootstrap(document, ["korpApp"]) } catch (error) { c.error(error) } try { const corpus = locationSearch()["corpus"] if (corpus) { settings.corpusListing.select(corpus.split(",")) } view.updateSearchHistory() } catch (error1) { c.error("ERROR setting corpora from location", error1) } if (isLab) { $("body").addClass("lab") } $("body").addClass(`mode-${window.currentMode}`) util.browserWarn() $("#search_history").change(function (event) { const target = $(this).find(":selected") if (_.includes(["http://", "https:/"], target.val().slice(0, 7))) { location.href = target.val() } else if (target.is(".clear")) { jStorage.set("searches", []) view.updateSearchHistory() } }) let prevFragment = {} // Note that this is _not_ window.onhashchange (lowercase only) and is not called by the browser window.onHashChange = function (event, isInit) { const hasChanged = (key) => prevFragment[key] !== locationSearch()[key] if (hasChanged("lang")) { const newLang = locationSearch().lang || settings["default_language"] $("body").scope().lang = newLang window.lang = newLang util.localize() $("#languages").radioList("select", newLang) } if (isInit) { util.localize() } prevFragment = _.extend({}, locationSearch()) } $("#languages").radioList({ change() { const currentLang = $(this).radioList("getSelected").data("mode") locationSearch({ lang: currentLang !== settings["default_language"] ? currentLang : null, }) }, // TODO: this does nothing? 
selected: settings["default_language"], }) setTimeout(() => window.onHashChange(null, true), 0) $("#main").animate({ opacity: 1 }, function () { $(this).css("opacity", "") }) })
Remove obsolete legacy hash-URL rewriting and global jQuery AJAX setup from main.js
app/scripts/main.js
Remove obsolete legacy hash-URL rewriting and global jQuery AJAX setup from main.js
<ide><path>pp/scripts/main.js <ide> } else { <ide> statemachine.send("USER_NOT_FOUND") <ide> } <del> <del>// rewriting old url format to the angular one <del>if (location.hash.length && location.hash[1] !== "?") { <del> location.hash = `#?${_.trimStart(location.hash, "#")}` <del>} <del> <del>$.ajaxSetup({ <del> dataType: "json", <del> traditional: true, <del>}) <del> <del>$.ajaxPrefilter("json", function (options) { <del> if (options.crossDomain && !$.support.cors) { <del> return "jsonp" <del> } <del>}) <ide> <ide> const loc_dfd = window.initLocales() <ide> $(document).keyup(function (event) {
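Since the removal is easy to miss in the diff, here is a small sketch of the main behaviour being dropped: the automatic upgrade of pre-Angular hash URLs on page load. The helper name and example hashes are invented for illustration; the rewrite logic mirrors the deleted block and relies on lodash's _.trimStart just as the original code did.

// What the removed snippet used to do on load (illustrative helper, not in the repo)
const upgradeLegacyHash = (hash) =>
  hash.length && hash[1] !== "?" ? `#?${_.trimStart(hash, "#")}` : hash

upgradeLegacyHash("#corpus=romi&search=word")    // → "#?corpus=romi&search=word"
upgradeLegacyHash("#?corpus=romi&search=word")   // → unchanged, already Angular-style
// After this commit, old-format "#corpus=..." links are no longer rewritten, and the
// global $.ajaxSetup / $.ajaxPrefilter configuration is gone as well.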
JavaScript
mit
bf278dbb9cf1ef1819a5bda85db5dd4bd2d30d43
0
igvteam/igv.js,igvteam/igv.js
/* * The MIT License (MIT) * * Copyright (c) 2014 Broad Institute * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ var igv = (function (igv) { igv.RulerTrack = function () { this.height = 40; this.name = ""; this.id = "ruler"; this.disableButtons = true; this.ignoreTrackMenu = true; this.order = -Number.MAX_VALUE; this.supportsWholeGenome = true; this.rulerSweepers = []; this.removable = false; }; igv.RulerTrack.prototype.updateLocusLabel = function () { var self = this; this.trackView.viewports.forEach(function (viewport) { var str; str = viewport.genomicState.referenceFrame.showLocus(viewport.$viewport.width()); // console.log('ruler update label - viewport ' + viewport.id + ' ' + str); viewport.$rulerLabel.text(str); }); }; igv.RulerTrack.prototype.appendMultiPanelCloseButton = function ($viewport, genomicState) { var $close, $closeButton; $viewport.addClass('igv-viewport-ruler'); $close = $('<div class="igv-viewport-fa-close">'); $viewport.append($close); $closeButton = $('<div>'); $closeButton.append(igv.createIcon("times-circle")); $close.append($closeButton); $close.click(function (e) { igv.browser.removeMultiLocusPanelWithGenomicState(genomicState, true); }); }; igv.RulerTrack.prototype.removeRulerSweeperWithLocusIndex = function (index) { this.rulerSweepers.splice(index, 1); }; igv.RulerTrack.prototype.getFeatures = function (chr, bpStart, bpEnd) { return Promise.resolve([]); }; igv.RulerTrack.prototype.draw = function (options) { var self = this, key, rulerSweeper, $viewportContent, pixelWidthBP, tick, label, shim, center, size, rect, tickHeight; key = igv.browser.genomicStateList.indexOf(options.genomicState).toString(); rulerSweeper = this.rulerSweepers[key]; if (!rulerSweeper) { console.log("No rulerSweeper for key: " + key); return; } $viewportContent = $(rulerSweeper.viewport.contentDiv); if ('all' === options.referenceFrame.chrName.toLowerCase()) { $viewportContent.find('canvas').hide(); $viewportContent.find('.igv-whole-genome-container').show(); rulerSweeper.disableMouseHandlers(); } else { $viewportContent.find('.igv-whole-genome-container').hide(); $viewportContent.find('canvas').show(); rulerSweeper.addMouseHandlers(); tickHeight = 6; shim = 2; pixelWidthBP = 1 + Math.floor(options.referenceFrame.toBP(options.pixelWidth)); tick = new igv.Tick(pixelWidthBP, options); tick.drawTicks(options, tickHeight, shim, this.height); igv.graphics.strokeLine(options.context, 0, this.height - shim, options.pixelWidth, this.height - shim); } }; igv.Tick = function (pixelWidthBP, options) { 
initialize.call(this, pixelWidthBP, options); function initialize(pixelWidthBP, options) { var numberOfZeroes, majorUnit, unitMultiplier, numberOfMajorTicks, str, labelWidthBP; if (pixelWidthBP < 10) { set.call(this, 1, "bp", 1); } numberOfZeroes = Math.floor(Math.log10(pixelWidthBP)); if (numberOfZeroes > 9) { majorUnit = "gb"; unitMultiplier = 1e9; } else if (numberOfZeroes > 6) { majorUnit = "mb"; unitMultiplier = 1e6; } else if (numberOfZeroes > 3) { majorUnit = "kb"; unitMultiplier = 1e3; } else { majorUnit = "bp"; unitMultiplier = 1; } str = igv.numberFormatter(Math.floor(pixelWidthBP / unitMultiplier)) + " " + majorUnit; this.labelWidthBP = Math.round(options.referenceFrame.toBP(options.context.measureText(str).width)); numberOfMajorTicks = pixelWidthBP / Math.pow(10, numberOfZeroes - 1); if (numberOfMajorTicks < 25) { set.call(this, Math.pow(10, numberOfZeroes - 1), majorUnit, unitMultiplier); } else { set.call(this, Math.pow(10, numberOfZeroes) / 2, majorUnit, unitMultiplier); } // this.description( (Math.floor(numberOfMajorTicks)) ); } function set(majorTick, majorUnit, unitMultiplier) { this.majorTick = majorTick; this.majorUnit = majorUnit; this.halfTick = majorTick / 2; this.quarterTick = majorTick / 4; this.minorTick = majorTick / 10.0; this.unitMultiplier = unitMultiplier; } }; igv.Tick.prototype.drawTicks = function (options, tickHeight, shim, height) { var numberOfTicks, bp, pixel, label, labelWidth, labelX, numer, floored; // major ticks numberOfTicks = Math.floor(options.bpStart / this.majorTick) - 1; pixel = 0; while (pixel < options.pixelWidth) { bp = Math.floor(numberOfTicks * this.majorTick); pixel = Math.round(options.referenceFrame.toPixels((bp - 1) - options.bpStart + 0.5)); label = igv.numberFormatter(Math.floor(bp / this.unitMultiplier)) + " " + this.majorUnit; labelWidth = options.context.measureText(label).width; labelX = pixel - labelWidth / 2; igv.graphics.fillText(options.context, label, labelX, height - (tickHeight / 0.75)); igv.graphics.strokeLine(options.context, pixel, height - tickHeight, pixel, height - shim); ++numberOfTicks; } // major ticks numberOfTicks = Math.floor(options.bpStart / this.halfTick) - 1; pixel = 0; while (pixel < options.pixelWidth) { bp = Math.floor(numberOfTicks * this.halfTick); pixel = Math.round(options.referenceFrame.toPixels((bp - 1) - options.bpStart + 0.5)); numer = bp / this.unitMultiplier; floored = Math.floor(numer); // console.log(numer - floored); if (numer === floored && (this.majorTick / this.labelWidthBP) > 8) { label = igv.numberFormatter(Math.floor(numer)) + " " + this.majorUnit; labelWidth = options.context.measureText(label).width; labelX = pixel - labelWidth / 2; igv.graphics.fillText(options.context, label, labelX, height - (tickHeight / 0.75)); } igv.graphics.strokeLine(options.context, pixel, height - tickHeight, pixel, height - shim); ++numberOfTicks; } }; igv.Tick.prototype.description = function (blurb) { console.log((blurb || '') + ' tick ' + igv.numberFormatter(this.majorTick) + ' label width ' + igv.numberFormatter(this.labelWidthBP) + ' multiplier ' + this.unitMultiplier); }; return igv; })(igv || {});
js/rulerTrack.js
/* * The MIT License (MIT) * * Copyright (c) 2014 Broad Institute * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ var igv = (function (igv) { igv.RulerTrack = function () { this.height = 40; this.name = ""; this.id = "ruler"; this.disableButtons = true; this.ignoreTrackMenu = true; this.order = -Number.MAX_VALUE; this.supportsWholeGenome = true; this.rulerSweepers = []; this.removable = false; }; igv.RulerTrack.prototype.updateLocusLabel = function () { var self = this; this.trackView.viewports.forEach(function (viewport) { var str; str = viewport.genomicState.referenceFrame.showLocus(viewport.$viewport.width()); // console.log('ruler update label - viewport ' + viewport.id + ' ' + str); viewport.$rulerLabel.text( str ); }); }; igv.RulerTrack.prototype.appendMultiPanelCloseButton = function ($viewport, genomicState) { var $close, $closeButton; $viewport.addClass('igv-viewport-ruler'); $close = $('<div class="igv-viewport-fa-close">'); $viewport.append($close); $closeButton = $('<div>'); $closeButton.append(igv.createIcon("times-circle")); $close.append($closeButton); $close.click(function (e) { igv.browser.removeMultiLocusPanelWithGenomicState(genomicState, true); }); }; igv.RulerTrack.prototype.removeRulerSweeperWithLocusIndex = function (index) { this.rulerSweepers.splice(index, 1); }; igv.RulerTrack.prototype.getFeatures = function (chr, bpStart, bpEnd) { return Promise.resolve([]); }; igv.RulerTrack.prototype.draw = function (options) { var self = this, key, rulerSweeper, $viewportContent, pixelWidthBP, tick, label, shim, center, size, rect, tickHeight; key = igv.browser.genomicStateList.indexOf(options.genomicState).toString(); rulerSweeper = this.rulerSweepers[ key ]; $viewportContent = $(rulerSweeper.viewport.contentDiv); if ('all' === options.referenceFrame.chrName.toLowerCase()) { $viewportContent.find('canvas').hide(); $viewportContent.find('.igv-whole-genome-container').show(); rulerSweeper.disableMouseHandlers(); } else { $viewportContent.find('.igv-whole-genome-container').hide(); $viewportContent.find('canvas').show(); rulerSweeper.addMouseHandlers(); tickHeight = 6; shim = 2; pixelWidthBP = 1 + Math.floor(options.referenceFrame.toBP(options.pixelWidth)); tick = new igv.Tick(pixelWidthBP, options); tick.drawTicks(options, tickHeight, shim, this.height); igv.graphics.strokeLine(options.context, 0, this.height - shim, options.pixelWidth, this.height - shim); } }; igv.Tick = function (pixelWidthBP, options) { initialize.call(this, pixelWidthBP, options); function 
initialize(pixelWidthBP, options) { var numberOfZeroes, majorUnit, unitMultiplier, numberOfMajorTicks, str, labelWidthBP; if (pixelWidthBP < 10) { set.call(this, 1, "bp", 1); } numberOfZeroes = Math.floor(Math.log10(pixelWidthBP)); if (numberOfZeroes > 9) { majorUnit = "gb"; unitMultiplier = 1e9; } else if (numberOfZeroes > 6) { majorUnit = "mb"; unitMultiplier = 1e6; } else if (numberOfZeroes > 3) { majorUnit = "kb"; unitMultiplier = 1e3; } else { majorUnit = "bp"; unitMultiplier = 1; } str = igv.numberFormatter(Math.floor(pixelWidthBP / unitMultiplier)) + " " + majorUnit; this.labelWidthBP = Math.round(options.referenceFrame.toBP(options.context.measureText( str ).width)); numberOfMajorTicks = pixelWidthBP / Math.pow(10, numberOfZeroes - 1); if (numberOfMajorTicks < 25) { set.call(this, Math.pow(10, numberOfZeroes - 1), majorUnit, unitMultiplier); } else { set.call(this, Math.pow(10, numberOfZeroes) / 2, majorUnit, unitMultiplier); } // this.description( (Math.floor(numberOfMajorTicks)) ); } function set(majorTick, majorUnit, unitMultiplier) { this.majorTick = majorTick; this.majorUnit = majorUnit; this.halfTick = majorTick / 2; this.quarterTick = majorTick / 4; this.minorTick = majorTick / 10.0; this.unitMultiplier = unitMultiplier; } }; igv.Tick.prototype.drawTicks = function (options, tickHeight, shim, height) { var numberOfTicks, bp, pixel, label, labelWidth, labelX, numer, floored; // major ticks numberOfTicks = Math.floor(options.bpStart/this.majorTick) - 1; pixel = 0; while (pixel < options.pixelWidth) { bp = Math.floor(numberOfTicks * this.majorTick); pixel = Math.round(options.referenceFrame.toPixels((bp - 1) - options.bpStart + 0.5)); label = igv.numberFormatter(Math.floor(bp / this.unitMultiplier)) + " " + this.majorUnit; labelWidth = options.context.measureText(label).width; labelX = pixel - labelWidth / 2; igv.graphics.fillText(options.context, label, labelX, height - (tickHeight / 0.75)); igv.graphics.strokeLine(options.context, pixel, height - tickHeight, pixel, height - shim); ++numberOfTicks; } // major ticks numberOfTicks = Math.floor(options.bpStart/this.halfTick) - 1; pixel = 0; while (pixel < options.pixelWidth) { bp = Math.floor(numberOfTicks * this.halfTick); pixel = Math.round(options.referenceFrame.toPixels((bp - 1) - options.bpStart + 0.5)); numer = bp / this.unitMultiplier; floored = Math.floor(numer); // console.log(numer - floored); if (numer === floored && (this.majorTick / this.labelWidthBP) > 8) { label = igv.numberFormatter(Math.floor(numer)) + " " + this.majorUnit; labelWidth = options.context.measureText(label).width; labelX = pixel - labelWidth / 2; igv.graphics.fillText(options.context, label, labelX, height - (tickHeight / 0.75)); } igv.graphics.strokeLine(options.context, pixel, height - tickHeight, pixel, height - shim); ++numberOfTicks; } }; igv.Tick.prototype.description = function (blurb) { console.log((blurb || '') + ' tick ' + igv.numberFormatter(this.majorTick) + ' label width ' + igv.numberFormatter(this.labelWidthBP) + ' multiplier ' + this.unitMultiplier); }; return igv; })(igv || {});
null check in rulerTrack initialization
js/rulerTrack.js
null check in rulerTrack initialization
<ide><path>s/rulerTrack.js <ide> str = viewport.genomicState.referenceFrame.showLocus(viewport.$viewport.width()); <ide> <ide> // console.log('ruler update label - viewport ' + viewport.id + ' ' + str); <del> viewport.$rulerLabel.text( str ); <add> viewport.$rulerLabel.text(str); <ide> }); <ide> <ide> }; <ide> tickHeight; <ide> <ide> key = igv.browser.genomicStateList.indexOf(options.genomicState).toString(); <del> rulerSweeper = this.rulerSweepers[ key ]; <add> rulerSweeper = this.rulerSweepers[key]; <add> if (!rulerSweeper) { <add> console.log("No rulerSweeper for key: " + key); <add> return; <add> } <add> <ide> <ide> $viewportContent = $(rulerSweeper.viewport.contentDiv); <ide> <ide> } <ide> <ide> str = igv.numberFormatter(Math.floor(pixelWidthBP / unitMultiplier)) + " " + majorUnit; <del> this.labelWidthBP = Math.round(options.referenceFrame.toBP(options.context.measureText( str ).width)); <add> this.labelWidthBP = Math.round(options.referenceFrame.toBP(options.context.measureText(str).width)); <ide> <ide> numberOfMajorTicks = pixelWidthBP / Math.pow(10, numberOfZeroes - 1); <ide> <ide> floored; <ide> <ide> // major ticks <del> numberOfTicks = Math.floor(options.bpStart/this.majorTick) - 1; <add> numberOfTicks = Math.floor(options.bpStart / this.majorTick) - 1; <ide> pixel = 0; <ide> while (pixel < options.pixelWidth) { <ide> <ide> } <ide> <ide> // major ticks <del> numberOfTicks = Math.floor(options.bpStart/this.halfTick) - 1; <add> numberOfTicks = Math.floor(options.bpStart / this.halfTick) - 1; <ide> pixel = 0; <ide> while (pixel < options.pixelWidth) { <ide>
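A minimal stand-alone sketch of the defensive-lookup pattern the commit above introduces in RulerTrack.prototype.draw: look the entry up by key, log and return early when it is missing instead of letting a later property access throw. The sweepers object and key values here are hypothetical stand-ins, not part of igv.js.

var sweepers = { '0': { viewport: 'left' } }; // hypothetical stand-in for this.rulerSweepers

function drawForKey(key) {
    var rulerSweeper = sweepers[key];
    if (!rulerSweeper) {
        // Guard in the spirit of the commit: report the missing entry and bail out
        // rather than throwing on rulerSweeper.viewport below.
        console.log("No rulerSweeper for key: " + key);
        return;
    }
    console.log("drawing viewport " + rulerSweeper.viewport);
}

drawForKey('0'); // drawing viewport left
drawForKey('1'); // No rulerSweeper for key: 1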
JavaScript
bsd-3-clause
f14edd8a0c38c59cc10fb218ba1e9875b9644996
0
exponent/react-native,arthuralee/react-native,pandiaraj44/react-native,exponentjs/react-native,myntra/react-native,arthuralee/react-native,hammerandchisel/react-native,exponentjs/react-native,exponentjs/react-native,javache/react-native,facebook/react-native,javache/react-native,exponentjs/react-native,myntra/react-native,pandiaraj44/react-native,myntra/react-native,pandiaraj44/react-native,exponent/react-native,arthuralee/react-native,javache/react-native,javache/react-native,pandiaraj44/react-native,janicduplessis/react-native,myntra/react-native,hoangpham95/react-native,hoangpham95/react-native,arthuralee/react-native,hoangpham95/react-native,janicduplessis/react-native,pandiaraj44/react-native,pandiaraj44/react-native,hammerandchisel/react-native,myntra/react-native,janicduplessis/react-native,javache/react-native,arthuralee/react-native,myntra/react-native,hammerandchisel/react-native,javache/react-native,facebook/react-native,hoangpham95/react-native,hoangpham95/react-native,hammerandchisel/react-native,facebook/react-native,exponent/react-native,hammerandchisel/react-native,javache/react-native,javache/react-native,myntra/react-native,janicduplessis/react-native,facebook/react-native,facebook/react-native,myntra/react-native,janicduplessis/react-native,janicduplessis/react-native,exponentjs/react-native,janicduplessis/react-native,exponentjs/react-native,myntra/react-native,hoangpham95/react-native,javache/react-native,exponentjs/react-native,janicduplessis/react-native,facebook/react-native,exponentjs/react-native,exponent/react-native,exponent/react-native,facebook/react-native,hammerandchisel/react-native,facebook/react-native,hoangpham95/react-native,pandiaraj44/react-native,exponent/react-native,hammerandchisel/react-native,facebook/react-native,exponent/react-native,exponent/react-native,hoangpham95/react-native,pandiaraj44/react-native,hammerandchisel/react-native
/** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @flow strict-local * @format */ 'use strict'; const ListViewDataSource = require('ListViewDataSource'); const React = require('React'); const ScrollView = require('ScrollView'); const StaticRenderer = require('StaticRenderer'); class ListViewMock extends React.Component<$FlowFixMeProps> { static latestRef: ?ListViewMock; static defaultProps = { /* $FlowFixMe(>=0.59.0 site=react_native_fb) This comment suppresses an * error caught by Flow 0.59 which was not caught before. Most likely, this * error is because an exported function parameter is missing an * annotation. Without an annotation, these parameters are uncovered by * Flow. */ renderScrollComponent: props => <ScrollView {...props} />, }; componentDidMount() { ListViewMock.latestRef = this; } render() { const {dataSource, renderFooter, renderHeader} = this.props; let rows = [ renderHeader && ( <StaticRenderer key="renderHeader" shouldUpdate={true} render={renderHeader} /> ), ]; const dataSourceRows = dataSource.rowIdentities.map( (rowIdentity, rowIdentityIndex) => { const sectionID = dataSource.sectionIdentities[rowIdentityIndex]; return rowIdentity.map((row, rowIndex) => ( <StaticRenderer key={'section_' + sectionID + '_row_' + rowIndex} shouldUpdate={true} render={this.props.renderRow.bind( null, dataSource.getRowData(rowIdentityIndex, rowIndex), sectionID, row, )} /> )); }, ); rows = [...rows, ...dataSourceRows]; renderFooter && rows.push( <StaticRenderer key="renderFooter" shouldUpdate={true} render={renderFooter} />, ); return this.props.renderScrollComponent({...this.props, children: rows}); } static DataSource = ListViewDataSource; } module.exports = ListViewMock;
Libraries/Lists/ListView/__mocks__/ListViewMock.js
/** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. * * @flow strict-local * @format */ 'use strict'; const ListViewDataSource = require('ListViewDataSource'); const React = require('React'); const ScrollView = require('ScrollView'); const StaticRenderer = require('StaticRenderer'); class ListViewMock extends React.Component<$FlowFixMeProps> { static latestRef: ?ListViewMock; static defaultProps = { /* $FlowFixMe(>=0.59.0 site=react_native_fb) This comment suppresses an * error caught by Flow 0.59 which was not caught before. Most likely, this * error is because an exported function parameter is missing an * annotation. Without an annotation, these parameters are uncovered by * Flow. */ renderScrollComponent: props => <ScrollView {...props} />, }; componentDidMount() { ListViewMock.latestRef = this; } render() { const {dataSource, renderFooter, renderHeader} = this.props; const rows = [renderHeader && renderHeader()]; const allRowIDs = dataSource.rowIdentities; for (let sectionIdx = 0; sectionIdx < allRowIDs.length; sectionIdx++) { const sectionID = dataSource.sectionIdentities[sectionIdx]; const rowIDs = allRowIDs[sectionIdx]; for (let rowIdx = 0; rowIdx < rowIDs.length; rowIdx++) { const rowID = rowIDs[rowIdx]; // Row IDs are only unique in a section rows.push( <StaticRenderer key={'section_' + sectionID + '_row_' + rowID} shouldUpdate={true} render={this.props.renderRow.bind( null, dataSource.getRowData(sectionIdx, rowIdx), sectionID, rowID, )} />, ); } } renderFooter && rows.push(renderFooter()); return this.props.renderScrollComponent({...this.props, children: rows}); } static DataSource = ListViewDataSource; } module.exports = ListViewMock;
Fix ListViewMock unique key error (#14894) Summary: Add key prop to renderHeader and renderFooter in ListViewMock. Fix unique key error when using jest snapshots. It closes #12762 Pull Request resolved: https://github.com/facebook/react-native/pull/14894 Reviewed By: TheSavior Differential Revision: D13396721 Pulled By: cpojer fbshipit-source-id: 5bbcb8157e3cd98fe07f2a037e1dbc06ab599c87
Libraries/Lists/ListView/__mocks__/ListViewMock.js
Fix ListViewMock unique key error (#14894)
<ide><path>ibraries/Lists/ListView/__mocks__/ListViewMock.js <ide> * Flow. */ <ide> renderScrollComponent: props => <ScrollView {...props} />, <ide> }; <add> <ide> componentDidMount() { <ide> ListViewMock.latestRef = this; <ide> } <add> <ide> render() { <ide> const {dataSource, renderFooter, renderHeader} = this.props; <del> const rows = [renderHeader && renderHeader()]; <del> const allRowIDs = dataSource.rowIdentities; <del> for (let sectionIdx = 0; sectionIdx < allRowIDs.length; sectionIdx++) { <del> const sectionID = dataSource.sectionIdentities[sectionIdx]; <del> const rowIDs = allRowIDs[sectionIdx]; <del> for (let rowIdx = 0; rowIdx < rowIDs.length; rowIdx++) { <del> const rowID = rowIDs[rowIdx]; <del> // Row IDs are only unique in a section <del> rows.push( <add> let rows = [ <add> renderHeader && ( <add> <StaticRenderer <add> key="renderHeader" <add> shouldUpdate={true} <add> render={renderHeader} <add> /> <add> ), <add> ]; <add> <add> const dataSourceRows = dataSource.rowIdentities.map( <add> (rowIdentity, rowIdentityIndex) => { <add> const sectionID = dataSource.sectionIdentities[rowIdentityIndex]; <add> return rowIdentity.map((row, rowIndex) => ( <ide> <StaticRenderer <del> key={'section_' + sectionID + '_row_' + rowID} <add> key={'section_' + sectionID + '_row_' + rowIndex} <ide> shouldUpdate={true} <ide> render={this.props.renderRow.bind( <ide> null, <del> dataSource.getRowData(sectionIdx, rowIdx), <add> dataSource.getRowData(rowIdentityIndex, rowIndex), <ide> sectionID, <del> rowID, <add> row, <ide> )} <del> />, <del> ); <del> } <del> } <del> renderFooter && rows.push(renderFooter()); <add> /> <add> )); <add> }, <add> ); <add> <add> rows = [...rows, ...dataSourceRows]; <add> renderFooter && <add> rows.push( <add> <StaticRenderer <add> key="renderFooter" <add> shouldUpdate={true} <add> render={renderFooter} <add> />, <add> ); <add> <ide> return this.props.renderScrollComponent({...this.props, children: rows}); <ide> } <ide> static DataSource = ListViewDataSource;
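A small illustration of why the commit above builds a distinct string key for each rendered child: React warns when sibling elements share a key, so the mock derives one from the section ID plus the row index. The section and row identities below are made up; only the key-construction scheme mirrors the change.

// Hypothetical identities, shaped like ListViewDataSource section/row identities.
var sectionIdentities = ['s1', 's2'];
var rowIdentities = [['a', 'b'], ['a']]; // row IDs are only unique within a section

var keys = ['renderHeader'];
rowIdentities.forEach(function (rows, sectionIndex) {
    rows.forEach(function (rowID, rowIndex) {
        // Combining the section ID with the row index keeps sibling keys unique
        // even when two sections reuse the same row ID ('a' here).
        keys.push('section_' + sectionIdentities[sectionIndex] + '_row_' + rowIndex);
    });
});
keys.push('renderFooter');

console.log(keys);
console.log('all unique: ' + (new Set(keys).size === keys.length));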
Java
epl-1.0
eadfc3b7ed729470852e911adc83068d5e0e67fe
0
CityOfLearning/ForgeEssentials,planetguy32/ForgeEssentials,aschmois/ForgeEssentialsMain,ForgeEssentials/ForgeEssentialsMain,Techjar/ForgeEssentials,liachmodded/ForgeEssentials
package com.ForgeEssentials.data.filesystem; import net.minecraft.server.MinecraftServer; import net.minecraft.src.IntegratedServer; import net.minecraftforge.common.Configuration; import net.minecraftforge.common.Property; import com.ForgeEssentials.core.ForgeEssentials; import com.ForgeEssentials.core.PlayerInfo; import com.ForgeEssentials.data.DataDriver; import com.ForgeEssentials.data.DataStorageManager; import com.ForgeEssentials.permission.Zone; import com.ForgeEssentials.util.FunctionHelper; import cpw.mods.fml.common.FMLCommonHandler; import cpw.mods.fml.common.Side; /** * Storage driver for filesystem (flat-file) persistence. * * @author MysteriousAges * */ public class FileSystemDataDriver extends DataDriver { public static final String driverType = "FileSystem"; private String baseFilePath; public static String newline = "\r\n"; public FileSystemDataDriver() { super(); DataDriver.instance = this; } @Override public boolean parseConfigs(Configuration config, String worldName) { Property prop; prop = config.get("Data.FileSystem", "useFEDataDir", false); prop.comment = "Set to true to use the '.minecraft/ForgeEssentials/saves' directory instead of a world. Server owners may wish to set this to true."; boolean useFEDir = prop.getBoolean(false); if (useFEDir) { this.baseFilePath = ForgeEssentials.FEDIR.toString() + "saves/" + worldName + "/"; } else { if (MinecraftServer.getServer() instanceof IntegratedServer) { // We are running from the client. Use the Client save directory. this.baseFilePath = "./saves/" + worldName + "/FEData/"; } else { // Dedicated server. Use the base path + world name. this.baseFilePath = "./" + worldName +"/FEData/"; } } config.save(); // Nothing to fail on. return true; } public String getBaseBath() { return this.baseFilePath; } /** * This function binds all DataAdapters * @param obj */ protected void registerAdapters() { this.map.put(PlayerInfo.class, new PlayerInfoDataAdapter()); this.map.put(Zone.class, new ZoneDataAdapter()); // Add additional flat-file storage classes here. } }
src/common/com/ForgeEssentials/data/filesystem/FileSystemDataDriver.java
package com.ForgeEssentials.data.filesystem; import net.minecraftforge.common.Configuration; import net.minecraftforge.common.Property; import com.ForgeEssentials.core.ForgeEssentials; import com.ForgeEssentials.core.PlayerInfo; import com.ForgeEssentials.data.DataDriver; import com.ForgeEssentials.data.DataStorageManager; import com.ForgeEssentials.permission.Zone; import com.ForgeEssentials.util.FunctionHelper; import cpw.mods.fml.common.FMLCommonHandler; import cpw.mods.fml.common.Side; /** * Storage driver for filesystem (flat-file) persistence. * * @author MysteriousAges * */ public class FileSystemDataDriver extends DataDriver { public static final String driverType = "FileSystem"; private String baseFilePath; public static String newline = "\r\n"; public FileSystemDataDriver() { super(); DataDriver.instance = this; } @Override public boolean parseConfigs(Configuration config, String worldName) { Property prop; prop = config.get("Data.FileSystem", "useFEDataDir", false); prop.comment = "Set to true to use the '.minecraft/ForgeEssentials/saves' directory instead of a world. Server owners may wish to set this to true."; boolean useFEDir = prop.getBoolean(false); if (useFEDir) { this.baseFilePath = ForgeEssentials.FEDIR.toString() + "saves/" + worldName + "/"; } else { if (Side.CLIENT == FMLCommonHandler.instance().getEffectiveSide()) { this.baseFilePath = "./saves/" + worldName + "/"; } else { this.baseFilePath = "./" + worldName +"/"; } } config.save(); // Nothing to fail on. return true; } public String getBaseBath() { return this.baseFilePath; } /** * This function binds all DataAdapters * @param obj */ protected void registerAdapters() { this.map.put(PlayerInfo.class, new PlayerInfoDataAdapter()); this.map.put(Zone.class, new ZoneDataAdapter()); // Add additional flat-file storage classes here. } }
Changes to Flat-file data driver directory. This method of determining server/client is much better.
src/common/com/ForgeEssentials/data/filesystem/FileSystemDataDriver.java
Changes to Flat-file data driver directory. This method of determining server/client is much better.
<ide><path>rc/common/com/ForgeEssentials/data/filesystem/FileSystemDataDriver.java <ide> package com.ForgeEssentials.data.filesystem; <ide> <add>import net.minecraft.server.MinecraftServer; <add>import net.minecraft.src.IntegratedServer; <ide> import net.minecraftforge.common.Configuration; <ide> import net.minecraftforge.common.Property; <ide> <ide> } <ide> else <ide> { <del> if (Side.CLIENT == FMLCommonHandler.instance().getEffectiveSide()) <add> if (MinecraftServer.getServer() instanceof IntegratedServer) <ide> { <del> this.baseFilePath = "./saves/" + worldName + "/"; <add> // We are running from the client. Use the Client save directory. <add> this.baseFilePath = "./saves/" + worldName + "/FEData/"; <ide> } <ide> else <ide> { <del> this.baseFilePath = "./" + worldName +"/"; <add> // Dedicated server. Use the base path + world name. <add> this.baseFilePath = "./" + worldName +"/FEData/"; <ide> } <ide> } <ide>
JavaScript
mit
f692aaff4b962505d0e05c761face48bd01ed151
0
david-prince/rockpaperscissors
angular.module('david.playground', []) .controller('DavidCtrl', ['$scope', function($scope) { console.log("David is here."); var computerChoice = function(){ var comChoice = Math.random(); if (comChoice <= .33){ comChoice = "rock"; } else if (comChoice >= .66){ comChoice = "paper"; } else { comChoice = "scissors"; } return comChoice; }; var whoWins = function(choice1,choice2){ var result = ""; if (choice1 === choice2) { result = "You tied with the computer!"; } else if (choice1 === "rock") { if (choice2 === "scissors") { result = "rock wins!"; } else { result = "paper wins!"; }; } else if (choice1 === "paper") { if (choice2 === "rock"){ result = "paper wins!"; } else { result = "scissors wins!"; }; } else if (choice1 === "scissors") { if (choice2 === "rock") { result = "rock wins!"; } else { result = "scissors wins!"; }; } return result; }; $scope.message = "Choose rock, paper, or scissors. Select your choice below."; $scope.choice = ""; $scope.buttonClick = function() { var computer = computerChoice(); var human = $scope.choice; var result = whoWins(human, computer); console.log(computer, human, result); $scope.message = result; }; }]) ;
src/app.js
angular.module('david.playground', []) .controller('DavidCtrl', ['$scope', function($scope) { console.log("David is here."); var computerChoice = function(){ var comChoice = Math.random(); if (comChoice <= .33){ comChoice = "rock"; } else if (comChoice >= .66){ comChoice = "paper"; } else { comChoice = "scissors"; } return comChoice; }; $scope.message = "Choose rock, paper, or scissors. Select your choice below."; $scope.choice = ""; $scope.buttonClick = function() { var bob = computerChoice(); console.log(bob); $scope.message = ""; }; }]) ;
Fix whoWins function and call it from buttonClick
src/app.js
Fix whoWins function and call it from buttonClick
<ide><path>rc/app.js <ide> } else { <ide> comChoice = "scissors"; <ide> } <del> return comChoice; <add> return comChoice; <ide> }; <add> <add> var whoWins = function(choice1,choice2){ <add> var result = ""; <add> if (choice1 === choice2) { <add> result = "You tied with the computer!"; <add> } else if (choice1 === "rock") { <add> if (choice2 === "scissors") { <add> result = "rock wins!"; <add> } else { <add> result = "paper wins!"; <add> }; <add> } else if (choice1 === "paper") { <add> if (choice2 === "rock"){ <add> result = "paper wins!"; <add> } else { <add> result = "scissors wins!"; <add> }; <add> } else if (choice1 === "scissors") { <add> if (choice2 === "rock") { <add> result = "rock wins!"; <add> } else { <add> result = "scissors wins!"; <add> }; <add> } <add> return result; <add> }; <add> <ide> $scope.message = "Choose rock, paper, or scissors. Select your choice below."; <ide> $scope.choice = ""; <ide> <ide> $scope.buttonClick = function() { <del> var bob = computerChoice(); <del> console.log(bob); <add> var computer = computerChoice(); <add> var human = $scope.choice; <ide> <del> $scope.message = ""; <add> var result = whoWins(human, computer); <add> console.log(computer, human, result); <add> <add> $scope.message = result; <ide> <ide> }; <ide>
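For comparison only, the pairwise if/else ladder added to whoWins above can also be written with a lookup table of which choice each choice defeats; this is a hedged sketch independent of the Angular controller, not the code the commit adds.

// 'beats' maps each choice to the choice it defeats.
var beats = { rock: 'scissors', paper: 'rock', scissors: 'paper' };

function whoWins(choice1, choice2) {
    if (choice1 === choice2) {
        return 'You tied with the computer!';
    }
    // If choice1 defeats choice2, choice1 wins; otherwise choice2 does.
    return (beats[choice1] === choice2 ? choice1 : choice2) + ' wins!';
}

console.log(whoWins('rock', 'scissors')); // rock wins!
console.log(whoWins('paper', 'scissors')); // scissors wins!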
JavaScript
apache-2.0
4a453bfe2421a79bdf2503e51e4877a7c6b6e169
0
nuest/o2r-muncher,o2r-project/o2r-muncher,nuest/o2r-muncher,o2r-project/o2r-muncher,nuest/o2r-muncher,nuest/o2r-muncher,o2r-project/o2r-muncher,o2r-project/o2r-muncher,o2r-project/o2r-muncher,nuest/o2r-muncher
/* * (C) Copyright 2017 o2r project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* eslint-env mocha */ const assert = require('chai').assert; const request = require('request'); const config = require('../config/config'); const createCompendiumPostRequest = require('./util').createCompendiumPostRequest; const publishCandidate = require('./util').publishCandidate; const startJob = require('./util').startJob; const mongojs = require('mongojs'); const fs = require('fs'); const sleep = require('sleep'); const unameCall = require('node-uname'); const path = require('path'); require("./setup"); const cookie_o2r = 's:C0LIrsxGtHOGHld8Nv2jedjL4evGgEHo.GMsWD5Vveq0vBt7/4rGeoH5Xx7Dd2pgZR9DvhKCyDTY'; const cookie_plain = 's:yleQfdYnkh-sbj9Ez--_TWHVhXeXNEgq.qRmINNdkRuJ+iHGg5woRa9ydziuJ+DzFG9GnAZRvaaM'; const sleepSecs = 40; let Docker = require('dockerode'); let docker = new Docker(); describe('API job steps', () => { var db = mongojs('localhost/muncher', ['compendia', 'jobs']); before(function (done) { db.compendia.drop(function (err, doc) { db.jobs.drop(function (err, doc) { done(); }); }); }); after(function (done) { db.close(); done(); }); describe('GET /api/v1/job (with no job started)', () => { it('should not yet contain array of job ids, but an empty list as valid JSON and HTTP 200', (done) => { request(global.test_host + '/api/v1/job', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'results'); assert.notProperty(response, 'error'); assert.isArray(response.results); assert.isEmpty(response.results); done(); }); }); }); describe('GET /api/v1/job?compendium_id for non-existing compendium', () => { it('should respond with HTTP 200 and and an empty list in JSON', (done) => { request(global.test_host + '/api/v1/job?compendium_id=1234', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'results'); assert.notProperty(response, 'error'); assert.isArray(response.results); assert.isEmpty(response.results); done(); }); }); }); describe('EXECUTION of unknown compendium', () => { it('should return HTTP error and valid JSON with error message', (done) => { let j = request.jar(); let ck = request.cookie('connect.sid=' + cookie_plain); j.setCookie(ck, global.test_host); request({ uri: global.test_host + '/api/v1/job', method: 'POST', jar: j, formData: { compendium_id: '54321' }, timeout: 1000 }, (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 400); let response = JSON.parse(body); assert.notProperty(response, 'job_id'); assert.property(response, 'error'); done(); }); }); }); describe('EXECUTION of multiple jobs', () => { let job_id0, job_id1, job_id2 = ''; before(function (done) { let req = createCompendiumPostRequest('./test/erc/step_validate_compendium', cookie_o2r); this.timeout(90000); request(req, (err, res, body) 
=> { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id0 = id; done(); }); }); }); }); it('should return job ID when starting _another_ job execution (different from the previous id)', (done) => { let j = request.jar(); let ck = request.cookie('connect.sid=' + cookie_plain); j.setCookie(ck, global.test_host); request({ uri: global.test_host + '/api/v1/job', method: 'POST', jar: j, formData: { compendium_id: compendium_id }, timeout: 1000 }, (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); let response = JSON.parse(body); assert.property(response, 'job_id'); assert.notEqual(response.job_id, job_id0); job_id1 = response.job_id; done(); }); }); it('should return job ID when starting _yet another_ job execution (different from the previous ids)', (done) => { let j = request.jar(); let ck = request.cookie('connect.sid=' + cookie_plain); j.setCookie(ck, global.test_host); request({ uri: global.test_host + '/api/v1/job', method: 'POST', jar: j, formData: { compendium_id: compendium_id }, timeout: 1000 }, (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); let response = JSON.parse(body); assert.property(response, 'job_id'); assert.notEqual(response.job_id, job_id0); assert.notEqual(response.job_id, job_id1); job_id2 = response.job_id; done(); }); }); }); describe('EXECUTION of candidate compendium', () => { let compendium_id = ''; before(function (done) { let req = createCompendiumPostRequest('./test/erc/step_validate_compendium', cookie_o2r); this.timeout(sleepSecs * 1000 * 2); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; sleep.sleep(sleepSecs); done(); }); }); it('should return HTTP error code and error message as valid JSON when starting job as logged-in user', (done) => { let j = request.jar(); let ck = request.cookie('connect.sid=' + cookie_plain); j.setCookie(ck, global.test_host); request({ uri: global.test_host + '/api/v1/job', method: 'POST', jar: j, formData: { compendium_id: compendium_id }, timeout: 1000 }, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.equal(res.statusCode, 400); assert.isObject(JSON.parse(body)); assert.notProperty(response, 'job_id'); assert.property(response, 'error'); done(); }); }); it('should return HTTP error code and error message as valid JSON even when starting as author', (done) => { let j = request.jar(); let ck = request.cookie('connect.sid=' + cookie_o2r); j.setCookie(ck, global.test_host); request({ uri: global.test_host + '/api/v1/job', method: 'POST', jar: j, formData: { compendium_id: compendium_id }, timeout: 1000 }, (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 400); assert.isObject(JSON.parse(body)); let response = JSON.parse(body); assert.notProperty(response, 'job_id'); assert.property(response, 'error'); done(); }); }); it('should return job ID after publishing compendium', (done) => { publishCandidate(compendium_id, cookie_o2r, () => { let j = request.jar(); let ck = request.cookie('connect.sid=' + cookie_plain); j.setCookie(ck, global.test_host); request({ uri: global.test_host + '/api/v1/job', method: 'POST', jar: j, formData: { compendium_id: compendium_id } }, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response, 'job_id'); done(); }); }); }).timeout(20000); }); describe('EXECUTION step_validate_bag', () => { let job_id = ''; before(function (done) { this.timeout(90000); let 
req = createCompendiumPostRequest('./test/erc/step_validate_bag', cookie_o2r); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should skip step "validate_bag"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); done(); }); }); it('should fail step "validate_compendium"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_compendium, 'status', 'failure'); done(); }); }); it('should skip configuration generation steps', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_configuration, 'status', 'skipped', 'generate configuration should be skipped'); done(); }); }); it('should have remaining steps "queued"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_manifest, 'status', 'queued', 'generate manifest should be queued'); assert.propertyVal(response.steps.image_prepare, 'status', 'queued', 'image prepare should be queued'); assert.propertyVal(response.steps.image_build, 'status', 'queued', 'image build should be queued'); assert.propertyVal(response.steps.image_execute, 'status', 'queued', 'image execute should be queued'); assert.propertyVal(response.steps.check, 'status', 'queued', 'check should be queued'); done(); }); }); it('should complete step "cleanup"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); it('should have overall status "failure"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response, 'status', 'failure'); done(); }); }); }); describe('EXECUTION step_validate_compendium', () => { let job_id = ''; before(function (done) { this.timeout(90000); let req = createCompendiumPostRequest('./test/erc/step_validate_compendium', cookie_o2r); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should complete step "validate_compendium"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); done(); }); }); it('should skip steps "validate_bag" and "generate_configuration"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); assert.propertyVal(response.steps.generate_configuration, 'status', 'skipped'); done(); }); }); it('should have steps "image_prepare", "image_build", "image_execute", and "check" 
queued', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_prepare, 'status', 'queued', 'image prepare is queued'); assert.propertyVal(response.steps.image_build, 'status', 'queued', 'image build is queued'); assert.propertyVal(response.steps.image_execute, 'status', 'queued', 'image execute is queued'); assert.propertyVal(response.steps.check, 'status', 'queued', 'check is queued'); done(); }); }); it('should fail step "generate_manifest"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_manifest, 'status', 'failure'); done(); }); }); it('should fail overall', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response, 'status', 'failure'); done(); }); }); }); describe('GET /api/v1/job with multiple jobs overall', () => { it('should contain fewer results if start is provided', (done) => { request(global.test_host + '/api/v1/job', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); let all_count = response.results.length; let start = 3; request(global.test_host + '/api/v1/job?start=' + start, (err2, res2, body2) => { assert.ifError(err2); let response2 = JSON.parse(body2); assert.equal(response2.results.length, all_count - start + 1); done(); }); }); }); it('should contain no results but an empty list (valid JSON, HTTP 200) if too large start parameter is provided', (done) => { request(global.test_host + '/api/v1/job?start=999', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'results'); assert.notProperty(response, 'error'); assert.isArray(response.results); assert.isEmpty(response.results); done(); }); }); it('should just list the number of jobs requested', (done) => { request(global.test_host + '/api/v1/job?limit=2', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.isArray(response.results); assert.equal(response.results.length, 2); done(); }); }); }); describe('EXECUTION configuration file generation', () => { it('should skip step (and previous step) for rmd-configuration-file, but complete following steps', (done) => { let req = createCompendiumPostRequest('./test/workspace/rmd-configuration-file', cookie_o2r, 'workspace'); request(req, (err, res, body) => { assert.ifError(err); let compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { let job_id = id; sleep.sleep(sleepSecs); request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped', 'skip validate bag'); assert.propertyVal(response.steps.generate_configuration, 'status', 'skipped', 'skip generate configuration because there is one'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success', 'succeed validate compendium'); assert.propertyVal(response.steps.image_prepare, 'status', 'success', 'succeed image prepare'); assert.propertyVal(response.steps.image_build, 'status', 'success', 'succeed image build'); 
assert.propertyVal(response.steps.image_execute, 'status', 'success', 'succeed image execute'); assert.propertyVal(response.steps.cleanup, 'status', 'success', 'succeed cleanup'); done(); }); }); }); }); }).timeout(sleepSecs * 1000 * 2);; it('should complete step "generate_configuration" and skip previous steps for minimal-rmd-data', (done) => { let req = createCompendiumPostRequest('./test/workspace/minimal-rmd-data', cookie_o2r, 'workspace'); request(req, (err, res, body) => { assert.ifError(err); let compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { let job_id = id; sleep.sleep(sleepSecs); request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped', 'skip validate bag'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success', 'succeed validate compendium'); assert.propertyVal(response.steps.generate_configuration, 'status', 'success', 'succeed generate configuration'); assert.propertyVal(response.steps.check, 'status', 'failure', 'fail check'); assert.isBelow(response.steps.check.images[0].compareResults.differences, 3200, 'fail check because of slight differences in image'); done(); }); }); }); }); }).timeout(sleepSecs * 1000 * 2); }); describe('EXECUTION Dockerfile generation for workspace minimal-rmd-data', () => { let job_id = ''; let compendium_id = ''; before(function (done) { this.timeout(90000); let req = createCompendiumPostRequest('./test/workspace/minimal-rmd-data', cookie_o2r, 'workspace'); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should skip previous steps', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); done(); }); }); it('should complete step "generate_manifest"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_manifest, 'status', 'success'); done(); }); }); it('show have the manifest file in the job files', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); filePaths = response.files.children.map(elem => { return elem.path; }); assert.include(filePaths, '/api/v1/job/' + job_id + '/data/Dockerfile'); done(); }); }); it('should have the manifest file in the compendium files', (done) => { request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); filePaths = response.files.children.map(elem => { return elem.path; }); assert.include(filePaths, '/api/v1/compendium/' + compendium_id + '/data/Dockerfile'); done(); }); }); it('should have the expected content in the manifest file via the job', function (done) { request(global.test_host_transporter + '/api/v1/job/' + job_id + '/data/Dockerfile', (err, res, body) => { assert.ifError(err); assert.isNotObject(body, 'response is not JSON'); assert.include(body, 'FROM 
rocker/r-ver:3.4.3'); assert.include(body, 'rmarkdown::render(input = \\"/erc/main.Rmd\\"'); done(); }); }); it('should have the expected content in the manifest file via the compendium', function (done) { request(global.test_host_transporter + '/api/v1/compendium/' + compendium_id + '/data/Dockerfile', (err, res, body) => { assert.ifError(err); assert.isNotObject(body, 'response is not JSON'); assert.include(body, 'FROM rocker/r-ver:3.4.3'); assert.include(body, 'rmarkdown::render(input = \\"/erc/main.Rmd\\"'); done(); }); }); it('should complete build, execute, and cleanup', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_build, 'status', 'success'); assert.propertyVal(response.steps.image_execute, 'status', 'success'); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); }); describe('EXECUTION Dockerfile generation for workspace minimal-script', () => { let job_id = ''; let compendium_id = ''; before(function (done) { this.timeout(90000); let req = createCompendiumPostRequest('./test/workspace/minimal-script', cookie_o2r, 'workspace'); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should skip validation steps because it is a workspace', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); done(); }); }); it('should complete step "generate_manifest"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_manifest, 'status', 'success'); done(); }); }); it('show have the manifest file in the job files', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); filePaths = response.files.children.map(elem => { return elem.path; }); assert.include(filePaths, '/api/v1/job/' + job_id + '/data/Dockerfile'); done(); }); }); it('should have the manifest file in the compendium files', (done) => { request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); filePaths = response.files.children.map(elem => { return elem.path; }); assert.include(filePaths, '/api/v1/compendium/' + compendium_id + '/data/Dockerfile'); done(); }); }); it('should have the expected content in the manifest', function (done) { request(global.test_host_transporter + '/api/v1/job/' + job_id + '/data/Dockerfile', (err, res, body) => { assert.ifError(err); assert.isNotObject(body, 'response is not JSON'); assert.notInclude(body, 'COPY', 'no COPY statement, because files are mounted'); assert.include(body, 'CMD ["R", "--vanilla", "-f", "main.R"]'); done(); }); }); it('should complete build, execute, and cleanup', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_build, 'status', 'success'); assert.propertyVal(response.steps.image_execute, 
'status', 'success'); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); }); describe('EXECUTION step_image_prepare', () => { let job_id = ''; before(function (done) { let req = createCompendiumPostRequest('./test/erc/step_image_prepare', cookie_o2r); this.timeout(90000); request(req, (err, res, body) => { let compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should complete step "image_prepare"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_prepare, 'status', 'success'); done(); }); }); it('should fail step "image_build"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_build, 'status', 'failure'); done(); }); }); it('should list other image_execute as queued', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_execute, 'status', 'queued'); done(); }); }); it('should complete step "cleanup"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); it('should have deleted payload file during cleanup', (done) => { let tarballFileName = path.join(config.payload.tarball.tmpdir, job_id + '.tar'); try { fs.lstatSync(tarballFileName); assert.fail(); } catch (error) { assert.include(error.message, 'no such file or directory'); done(); } }); }); describe('EXECUTION step_image_build', () => { let job_id = ''; before(function (done) { let req = createCompendiumPostRequest('./test/erc/step_image_build', cookie_o2r); this.timeout(90000); request(req, (err, res, body) => { let compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should complete all previous steps', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped', 'bag validation should fail with "skipped" because of added metadata files'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); assert.propertyVal(response.steps.image_prepare, 'status', 'success'); done(); }); }); it('should skip steps "generate_configuration" and "generate_manifest"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_configuration, 'status', 'skipped'); assert.propertyVal(response.steps.generate_manifest, 'status', 'skipped'); done(); }); }); it('should complete step "image_build"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_build, 'status', 'success'); done(); }); }); it('should fail step "image_execute" with a status code "1"', (done) => { request(global.test_host + '/api/v1/job/' 
+ job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_execute, 'status', 'failure'); assert.propertyVal(response.steps.image_execute, 'statuscode', 1); done(); }); }); it('should complete step "cleanup"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); it('should have created an image (skipped if images are not kept)', function (done) { if (config.bagtainer.keepImages) { docker.listImages(function (err, images) { assert.ifError(err); let names = new Set(); images.forEach(function (image) { if (image.RepoTags) { image.RepoTags.forEach(function (tag) { names.add(tag); }); } }); assert.include(names, config.bagtainer.imageNamePrefix + job_id); done(); }); } else { this.skip(); } }).timeout(sleepSecs * 1000); }); describe('EXECUTION step_image_execute', () => { let job_id = ''; before(function (done) { this.timeout(90000); let req = createCompendiumPostRequest('./test/erc/step_image_execute', cookie_o2r); request(req, (err, res, body) => { let compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs / 2); done(); }); }); }); }); it('should complete step all previous steps (and skip bag validation)', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); assert.propertyVal(response.steps.image_prepare, 'status', 'success'); assert.propertyVal(response.steps.image_build, 'status', 'success'); assert.propertyVal(response.steps.image_execute, 'status', 'success'); done(); }); }); it('should complete step "image_execute"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); done(); }); }); it('should fail step "check" and have empty images and display properties (depends on https://github.com/o2r-project/erc-checker/issues/8)', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.check, 'status', 'failure'); assert.propertyVal(response.steps.check, 'checkSuccessful', false); assert.property(response.steps.check, 'display'); assert.isNotNull(response.steps.check.display); assert.property(response.steps.check, 'images'); assert.isArray(response.steps.check.images); assert.isEmpty(response.steps.check.images); done(); }); }); it('should have a diff HTML but no images (depends on https://github.com/o2r-project/erc-checker/issues/8)', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.check, 'display'); assert.property(response.steps.check, 'images'); assert.isArray(response.steps.check.images); assert.isEmpty(response.steps.check.images); done(); }); }); it('should have a non-empty errors array', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); 
assert.property(response.steps.check, 'errors'); assert.isArray(response.steps.check.errors); assert.isNotEmpty(response.steps.check.errors); assert.include(JSON.stringify(response.steps.check.errors), 'no such file'); assert.include(JSON.stringify(response.steps.check.errors), 'wrongname.html'); done(); }); }); it('should have step "image_save" queued', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_save, 'status', 'queued'); done(); }); }); it('should complete step "cleanup"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); it('execution log should include uname output', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.image_execute, 'text'); let uname = unameCall(); log = JSON.stringify(response.steps.image_execute.text); assert.include(log, uname.machine); assert.include(log, uname.release); assert.include(log, uname.sysname); assert.include(log, uname.version); done(); }); }); it('should have deleted container during cleanup (skipped if containers are kept)', function (done) { if (!config.bagtainer.keepContainers) { docker.listContainers({ all: true }, function (err, containers) { containers.forEach(function (containerInfo) { assert.notEqual(containerInfo.Image, config.bagtainer.imageNamePrefix + job_id); }); done(); }); } else { this.skip(); } }); it('should have deleted image during cleanup (skipped if images are kept)', function (done) { if (!config.bagtainer.keepImages) { docker.listImages(function (err, images) { assert.ifError(err); images.forEach(function (image) { let tags = image.RepoTags; tags.forEach(function (tag) { assert.notEqual(tag, config.bagtainer.imageNamePrefix + job_id); }); }); done(); }); } else { this.skip(); } }); it('should have deleted payload file during cleanup', (done) => { let tarballFileName = path.join(config.payload.tarball.tmpdir, job_id + '.tar'); try { fs.lstatSync(tarballFileName); assert.fail(); } catch (error) { assert.include(error.message, 'no such file or directory'); done(); } }); }); describe('EXECUTION step_check', () => { let job_id, compendium_id = ''; before(function (done) { this.timeout(80000); let req = createCompendiumPostRequest('./test/erc/step_check', cookie_o2r); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should complete all other steps (and skip bag validation)', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); assert.propertyVal(response.steps.image_prepare, 'status', 'success'); assert.propertyVal(response.steps.image_build, 'status', 'success'); assert.propertyVal(response.steps.image_execute, 'status', 'success'); assert.propertyVal(response.steps.image_save, 'status', 'success'); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); it('should complete 
step "check" but not have a display diff nor images', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.check, 'status', 'success'); assert.propertyVal(response.steps.check, 'checkSuccessful', true); assert.property(response.steps.check, 'display'); assert.isNotNull(response.steps.check.display); assert.property(response.steps.check, 'images'); assert.isArray(response.steps.check.images); assert.isEmpty(response.steps.check.images); done(); }); }); it('should have a reference to the image file in step image_save', function (done) { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.image_save, 'file'); assert.propertyVal(response.steps.image_save, 'file', 'image.tar'); done(); }); }); it('should have a text log for image_save', function (done) { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.image_save, 'text'); assert.include(JSON.stringify(response.steps.image_save.text), 'Saved image tarball'); done(); }); }); it('should mention the overwriting of the image tarball when running a second job', function (done) { startJob(compendium_id, id => { job_id = id; sleep.sleep(10); request(global.test_host + '/api/v1/job/' + job_id + '?steps=image_save', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.include(JSON.stringify(response.steps.image_save.text), 'Deleting existing image tarball file'); done(); }); }); }).timeout(20000); it('should list the image tarball in the compendium file listing', function (done) { request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.include(JSON.stringify(response.files), 'image.tar'); done(); }); }); it('should not have the image tarball in the job file listing', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.notInclude(JSON.stringify(response.files), 'image.tar'); done(); }); }); }); describe('EXECUTION check with random result in HTML', () => { let job_id = ''; let compendium_id = ''; before(function (done) { this.timeout(90000); let req = createCompendiumPostRequest('./test/workspace/rmd-data-random', cookie_o2r, 'workspace'); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should skip validate bag step', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); done(); }); }); it('should have same start and end date for skipped step', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.validate_bag, 'start'); assert.property(response.steps.validate_bag, 'end'); assert.equal(response.steps.validate_bag.start, response.steps.validate_bag.end, 'skipped step validate bag has same date for start and end'); done(); 
}); }); it('should complete generate configuration, validate compendium, image build, image execute, and cleanup', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_configuration, 'status', 'success'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); assert.propertyVal(response.steps.image_build, 'status', 'success'); assert.propertyVal(response.steps.image_execute, 'status', 'success'); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); it('should complete generate manifest and have the correct manifest file path in the step details', function (done) { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_manifest, 'status', 'success'); assert.property(response.steps.generate_manifest, 'manifest'); assert.propertyVal(response.steps.generate_manifest, 'manifest', 'Dockerfile'); assert.notInclude(response.steps.generate_manifest.manifest, config.fs.base); done(); }); }); it('should fail the step check', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.check, 'status', 'failure'); done(); }); }); it('should skip the step image_save', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_save, 'status', 'skipped'); done(); }); }); it('should have empty errors array in the step check', function (done) { request(global.test_host + '/api/v1/job/' + job_id + '?steps=check', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.check, 'errors'); assert.isArray(response.steps.check.errors) assert.isEmpty(response.steps.check.errors); done(); }); }); it('should have a reference to a diff file step check', function (done) { request(global.test_host + '/api/v1/job/' + job_id + '?steps=check', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.check, 'display'); assert.property(response.steps.check.display, 'diff'); done(); }); }); it('should not have an HTML file in the files list named as the main document (output_file naming works)', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.notInclude(JSON.stringify(response.files), 'main.html'); done(); }); }); }); }); describe('API job details filtering', () => { var db = mongojs('localhost/muncher', ['compendia', 'jobs']); var job_id; before(function (done) { this.timeout(90000); db.compendia.drop(function (err, doc) { db.jobs.drop(function (err, doc) { let req = createCompendiumPostRequest('./test/workspace/minimal-rmd-data', cookie_o2r, 'workspace'); request(req, (err, res, body) => { let compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); }); }); after(function (done) { db.close(); done(); }); describe('GET /api/v1/job when "steps" is missing', () => { it('should return only status, start and end', (done) => { request(global.test_host 
+ '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200, 'status code OK'); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'steps'); Object.entries(response.steps).forEach(([step, value], index, array) => { assert.property(value, 'status', step + ' has status'); assert.property(value, 'start', step + ' has start'); assert.property(value, 'end', step + ' has end'); assert.notProperty(value, 'text', step + ' does not have text'); assert.notProperty(value, 'statuscode', step + ' does not have statuscode'); assert.notProperty(value, 'images', step + ' does not have images'); assert.notProperty(value, 'manifest', step + ' does not have manifest'); }); done(); }); }); }); describe('GET /api/v1/job when "steps=all"', () => { it('should return all details', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'steps'); Object.entries(response.steps).forEach(([step, value], index, array) => { assert.property(value, 'status', step + ' has status'); assert.property(value, 'start', step + ' has start'); assert.property(value, 'end'), step + ' has end'; assert.property(value, 'text', step + ' has text'); }); assert.property(response.steps.generate_manifest, 'manifest'); assert.property(response.steps.image_execute, 'statuscode'); assert.property(response.steps.check, 'images'); assert.property(response.steps.check, 'display'); done(); }); }); }); describe('GET /api/v1/job for one selected step', () => { it('should give status, start and end but full details for the step', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=generate_manifest', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'steps'); Object.entries(response.steps).forEach(([step, value], index, array) => { assert.property(value, 'status', step + ' has status'); assert.property(value, 'start', step + ' has start'); assert.property(value, 'end'), step + ' has end'; if (step != 'generate_manifest') assert.notProperty(value, 'text', step + ' does not have text'); }); assert.property(response.steps.generate_manifest, 'manifest'); assert.property(response.steps.generate_manifest, 'text'); done(); }); }); }); describe('GET /api/v1/job with trailing slash and without', () => { it('should just work', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '/?steps=validate_bag', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let responseWith = JSON.parse(body); request(global.test_host + '/api/v1/job/' + job_id + '?steps=validate_bag', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let responseWithout = JSON.parse(body); assert.property(responseWith.steps.validate_bag, 'status'); assert.property(responseWithout.steps.validate_bag, 'status'); assert.property(responseWith.steps.validate_bag, 'text'); assert.property(responseWithout.steps.validate_bag, 'text'); assert.notProperty(responseWith.steps.validate_compendium, 'text'); assert.notProperty(responseWithout.steps.validate_compendium, 'text'); 
done(); }); }); }); }); describe('GET /api/v1/job with two selected steps', () => { it('should give status, start and end for all steps, but full details for two selected steps', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=check,cleanup', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'steps'); Object.entries(response.steps).forEach(([step, value], index, array) => { assert.property(value, 'status', step + ' has status'); assert.property(value, 'start', step + ' has start'); assert.property(value, 'end'), step + ' has end'; if (!['check', 'cleanup'].includes(step)) { assert.notProperty(value, 'text', step + ' does not have text'); } }); assert.property(response.steps.check, 'text'); assert.property(response.steps.cleanup, 'text'); assert.property(response.steps.check, 'images'); done(); }); }); }); describe('GET /api/v1/job with two existing steps and one unknown', () => { it('should give status, start and end for all steps, but full details for two selected steps', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=check,cleanup,oneGiantLeap', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'steps'); Object.entries(response.steps).forEach(([step, value], index, array) => { assert.property(value, 'status', step + ' has status'); assert.property(value, 'start', step + ' has start'); assert.property(value, 'end'), step + ' has end'; if (!['check', 'cleanup'].includes(step)) { assert.notProperty(value, 'text', step + ' does not have text'); } }); assert.property(response.steps.check, 'text'); assert.property(response.steps.cleanup, 'text'); assert.property(response.steps.check, 'images'); done(); }); }); }); describe('GET /api/v1/job with unknown steps parameter', () => { it('should have the default behaviour', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=none', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200, 'status code OK'); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'steps'); Object.entries(response.steps).forEach(([step, value], index, array) => { assert.property(value, 'status', step + ' has status'); assert.property(value, 'start', step + ' has start'); assert.property(value, 'end'), step + ' has end'; assert.notProperty(value, 'text', step + ' does not have text'); assert.notProperty(value, 'statuscode', step + ' does not have statuscode'); assert.notProperty(value, 'images', step + ' does not have images'); assert.notProperty(value, 'manifest', step + ' does not have manifest'); }); done(); }); }); }); });
test/job-steps.js
/* * (C) Copyright 2017 o2r project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* eslint-env mocha */ const assert = require('chai').assert; const request = require('request'); const config = require('../config/config'); const createCompendiumPostRequest = require('./util').createCompendiumPostRequest; const publishCandidate = require('./util').publishCandidate; const startJob = require('./util').startJob; const mongojs = require('mongojs'); const fs = require('fs'); const sleep = require('sleep'); const unameCall = require('node-uname'); const path = require('path'); require("./setup"); const cookie_o2r = 's:C0LIrsxGtHOGHld8Nv2jedjL4evGgEHo.GMsWD5Vveq0vBt7/4rGeoH5Xx7Dd2pgZR9DvhKCyDTY'; const cookie_plain = 's:yleQfdYnkh-sbj9Ez--_TWHVhXeXNEgq.qRmINNdkRuJ+iHGg5woRa9ydziuJ+DzFG9GnAZRvaaM'; const sleepSecs = 40; let Docker = require('dockerode'); let docker = new Docker(); describe('API job steps', () => { var db = mongojs('localhost/muncher', ['compendia', 'jobs']); before(function (done) { db.compendia.drop(function (err, doc) { db.jobs.drop(function (err, doc) { done(); }); }); }); after(function (done) { db.close(); done(); }); describe('GET /api/v1/job (with no job started)', () => { it('should not yet contain array of job ids, but an empty list as valid JSON and HTTP 200', (done) => { request(global.test_host + '/api/v1/job', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'results'); assert.notProperty(response, 'error'); assert.isArray(response.results); assert.isEmpty(response.results); done(); }); }); }); describe('GET /api/v1/job?compendium_id for non-existing compendium', () => { it('should respond with HTTP 200 and and an empty list in JSON', (done) => { request(global.test_host + '/api/v1/job?compendium_id=1234', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'results'); assert.notProperty(response, 'error'); assert.isArray(response.results); assert.isEmpty(response.results); done(); }); }); }); describe('EXECUTION of unknown compendium', () => { it('should return HTTP error and valid JSON with error message', (done) => { let j = request.jar(); let ck = request.cookie('connect.sid=' + cookie_plain); j.setCookie(ck, global.test_host); request({ uri: global.test_host + '/api/v1/job', method: 'POST', jar: j, formData: { compendium_id: '54321' }, timeout: 1000 }, (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 400); let response = JSON.parse(body); assert.notProperty(response, 'job_id'); assert.property(response, 'error'); done(); }); }); }); describe('EXECUTION of multiple jobs', () => { let job_id0, job_id1, job_id2 = ''; before(function (done) { let req = createCompendiumPostRequest('./test/erc/step_validate_compendium', cookie_o2r); this.timeout(90000); request(req, (err, res, body) 
=> { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id0 = id; done(); }); }); }); }); it('should return job ID when starting _another_ job execution (different from the previous id)', (done) => { let j = request.jar(); let ck = request.cookie('connect.sid=' + cookie_plain); j.setCookie(ck, global.test_host); request({ uri: global.test_host + '/api/v1/job', method: 'POST', jar: j, formData: { compendium_id: compendium_id }, timeout: 1000 }, (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); let response = JSON.parse(body); assert.property(response, 'job_id'); assert.notEqual(response.job_id, job_id0); job_id1 = response.job_id; done(); }); }); it('should return job ID when starting _yet another_ job execution (different from the previous ids)', (done) => { let j = request.jar(); let ck = request.cookie('connect.sid=' + cookie_plain); j.setCookie(ck, global.test_host); request({ uri: global.test_host + '/api/v1/job', method: 'POST', jar: j, formData: { compendium_id: compendium_id }, timeout: 1000 }, (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); let response = JSON.parse(body); assert.property(response, 'job_id'); assert.notEqual(response.job_id, job_id0); assert.notEqual(response.job_id, job_id1); job_id2 = response.job_id; done(); }); }); }); describe('EXECUTION of candidate compendium', () => { let compendium_id = ''; before(function (done) { let req = createCompendiumPostRequest('./test/erc/step_validate_compendium', cookie_o2r); this.timeout(sleepSecs * 1000 * 2); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; sleep.sleep(sleepSecs); done(); }); }); it('should return HTTP error code and error message as valid JSON when starting job as logged-in user', (done) => { let j = request.jar(); let ck = request.cookie('connect.sid=' + cookie_plain); j.setCookie(ck, global.test_host); request({ uri: global.test_host + '/api/v1/job', method: 'POST', jar: j, formData: { compendium_id: compendium_id }, timeout: 1000 }, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.equal(res.statusCode, 400); assert.isObject(JSON.parse(body)); assert.notProperty(response, 'job_id'); assert.property(response, 'error'); done(); }); }); it('should return HTTP error code and error message as valid JSON even when starting as author', (done) => { let j = request.jar(); let ck = request.cookie('connect.sid=' + cookie_o2r); j.setCookie(ck, global.test_host); request({ uri: global.test_host + '/api/v1/job', method: 'POST', jar: j, formData: { compendium_id: compendium_id }, timeout: 1000 }, (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 400); assert.isObject(JSON.parse(body)); let response = JSON.parse(body); assert.notProperty(response, 'job_id'); assert.property(response, 'error'); done(); }); }); it('should return job ID after publishing compendium', (done) => { publishCandidate(compendium_id, cookie_o2r, () => { let j = request.jar(); let ck = request.cookie('connect.sid=' + cookie_plain); j.setCookie(ck, global.test_host); request({ uri: global.test_host + '/api/v1/job', method: 'POST', jar: j, formData: { compendium_id: compendium_id } }, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response, 'job_id'); done(); }); }); }).timeout(20000); }); describe('EXECUTION step_validate_bag', () => { let job_id = ''; before(function (done) { this.timeout(90000); let 
req = createCompendiumPostRequest('./test/erc/step_validate_bag', cookie_o2r); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should skip step "validate_bag"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); done(); }); }); it('should fail step "validate_compendium"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_compendium, 'status', 'failure'); done(); }); }); it('should skip configuration generation steps', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_configuration, 'status', 'skipped', 'generate configuration should be skipped'); done(); }); }); it('should have remaining steps "queued"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_manifest, 'status', 'queued', 'generate manifest should be queued'); assert.propertyVal(response.steps.image_prepare, 'status', 'queued', 'image prepare should be queued'); assert.propertyVal(response.steps.image_build, 'status', 'queued', 'image build should be queued'); assert.propertyVal(response.steps.image_execute, 'status', 'queued', 'image execute should be queued'); assert.propertyVal(response.steps.check, 'status', 'queued', 'check should be queued'); done(); }); }); it('should complete step "cleanup"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); it('should have overall status "failure"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response, 'status', 'failure'); done(); }); }); }); describe('EXECUTION step_validate_compendium', () => { let job_id = ''; before(function (done) { this.timeout(90000); let req = createCompendiumPostRequest('./test/erc/step_validate_compendium', cookie_o2r); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should complete step "validate_compendium"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); done(); }); }); it('should skip steps "validate_bag" and "generate_configuration"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); assert.propertyVal(response.steps.generate_configuration, 'status', 'skipped'); done(); }); }); it('should have steps "image_prepare", "image_build", "image_execute", and "check" 
queued', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_prepare, 'status', 'queued', 'image prepare is queued'); assert.propertyVal(response.steps.image_build, 'status', 'queued', 'image build is queued'); assert.propertyVal(response.steps.image_execute, 'status', 'queued', 'image execute is queued'); assert.propertyVal(response.steps.check, 'status', 'queued', 'check is queued'); done(); }); }); it('should fail step "generate_manifest"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_manifest, 'status', 'failure'); done(); }); }); it('should fail overall', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response, 'status', 'failure'); done(); }); }); }); describe('GET /api/v1/job with multiple jobs overall', () => { it('should contain fewer results if start is provided', (done) => { request(global.test_host + '/api/v1/job', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); let all_count = response.results.length; let start = 3; request(global.test_host + '/api/v1/job?start=' + start, (err2, res2, body2) => { assert.ifError(err2); let response2 = JSON.parse(body2); assert.equal(response2.results.length, all_count - start + 1); done(); }); }); }); it('should contain no results but an empty list (valid JSON, HTTP 200) if too large start parameter is provided', (done) => { request(global.test_host + '/api/v1/job?start=999', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'results'); assert.notProperty(response, 'error'); assert.isArray(response.results); assert.isEmpty(response.results); done(); }); }); it('should just list the number of jobs requested', (done) => { request(global.test_host + '/api/v1/job?limit=2', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.isArray(response.results); assert.equal(response.results.length, 2); done(); }); }); }); describe('EXECUTION configuration file generation', () => { it('should skip step (and previous step) for rmd-configuration-file, but complete following steps', (done) => { let req = createCompendiumPostRequest('./test/workspace/rmd-configuration-file', cookie_o2r, 'workspace'); request(req, (err, res, body) => { assert.ifError(err); let compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { let job_id = id; sleep.sleep(sleepSecs); request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped', 'skip validate bag'); assert.propertyVal(response.steps.generate_configuration, 'status', 'skipped', 'skip generate configuration because there is one'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success', 'succeed validate compendium'); assert.propertyVal(response.steps.image_prepare, 'status', 'success', 'succeed image prepare'); assert.propertyVal(response.steps.image_build, 'status', 'success', 'succeed image build'); 
assert.propertyVal(response.steps.image_execute, 'status', 'success', 'succeed image execute'); assert.propertyVal(response.steps.cleanup, 'status', 'success', 'succeed cleanup'); done(); }); }); }); }); }).timeout(sleepSecs * 1000 * 2);; it('should complete step "generate_configuration" and skip previous steps for minimal-rmd-data', (done) => { let req = createCompendiumPostRequest('./test/workspace/minimal-rmd-data', cookie_o2r, 'workspace'); request(req, (err, res, body) => { assert.ifError(err); let compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { let job_id = id; sleep.sleep(sleepSecs); request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped', 'skip validate bag'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success', 'succeed validate compendium'); assert.propertyVal(response.steps.generate_configuration, 'status', 'success', 'succeed generate configuration'); assert.propertyVal(response.steps.check, 'status', 'failure', 'fail check'); assert.isBelow(response.steps.check.images[0].compareResults.differences, 3200, 'fail check because of slight differences in image'); done(); }); }); }); }); }).timeout(sleepSecs * 1000 * 2); }); describe('EXECUTION Dockerfile generation for workspace minimal-rmd-data', () => { let job_id = ''; let compendium_id = ''; before(function (done) { this.timeout(90000); let req = createCompendiumPostRequest('./test/workspace/minimal-rmd-data', cookie_o2r, 'workspace'); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should skip previous steps', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); done(); }); }); it('should complete step "generate_manifest"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_manifest, 'status', 'success'); done(); }); }); it('show have the manifest file in the job files', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); filePaths = response.files.children.map(elem => { return elem.path; }); assert.include(filePaths, '/api/v1/job/' + job_id + '/data/Dockerfile'); done(); }); }); it('should have the manifest file in the compendium files', (done) => { request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); filePaths = response.files.children.map(elem => { return elem.path; }); assert.include(filePaths, '/api/v1/compendium/' + compendium_id + '/data/Dockerfile'); done(); }); }); it('should have the expected content in the manifest file via the job', function (done) { request(global.test_host_transporter + '/api/v1/job/' + job_id + '/data/Dockerfile', (err, res, body) => { assert.ifError(err); assert.isNotObject(body, 'response is not JSON'); assert.include(body, 'FROM 
rocker/r-ver:3.4.3'); assert.include(body, 'rmarkdown::render(input = \\"/erc/main.Rmd\\"'); done(); }); }); it('should have the expected content in the manifest file via the compendium', function (done) { request(global.test_host_transporter + '/api/v1/compendium/' + compendium_id + '/data/Dockerfile', (err, res, body) => { assert.ifError(err); assert.isNotObject(body, 'response is not JSON'); assert.include(body, 'FROM rocker/r-ver:3.4.3'); assert.include(body, 'rmarkdown::render(input = \\"/erc/main.Rmd\\"'); done(); }); }); it('should complete build, execute, and cleanup', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_build, 'status', 'success'); assert.propertyVal(response.steps.image_execute, 'status', 'success'); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); }); describe('EXECUTION Dockerfile generation for workspace minimal-script', () => { let job_id = ''; let compendium_id = ''; before(function (done) { this.timeout(90000); let req = createCompendiumPostRequest('./test/workspace/minimal-script', cookie_o2r, 'workspace'); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should skip validation steps because it is a workspace', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); done(); }); }); it('should complete step "generate_manifest"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_manifest, 'status', 'success'); done(); }); }); it('show have the manifest file in the job files', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); filePaths = response.files.children.map(elem => { return elem.path; }); assert.include(filePaths, '/api/v1/job/' + job_id + '/data/Dockerfile'); done(); }); }); it('should have the manifest file in the compendium files', (done) => { request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); filePaths = response.files.children.map(elem => { return elem.path; }); assert.include(filePaths, '/api/v1/compendium/' + compendium_id + '/data/Dockerfile'); done(); }); }); it('should have the expected content in the manifest', function (done) { request(global.test_host_transporter + '/api/v1/job/' + job_id + '/data/Dockerfile', (err, res, body) => { assert.ifError(err); assert.isNotObject(body, 'response is not JSON'); assert.notInclude(body, 'COPY', 'no COPY statement, because files are mounted'); assert.include(body, 'CMD ["R", "--vanilla", "-f", "main.R"]'); done(); }); }); it('should complete build, execute, and cleanup', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_build, 'status', 'success'); assert.propertyVal(response.steps.image_execute, 
'status', 'success'); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); }); describe('EXECUTION step_image_prepare', () => { let job_id = ''; before(function (done) { let req = createCompendiumPostRequest('./test/erc/step_image_prepare', cookie_o2r); this.timeout(90000); request(req, (err, res, body) => { let compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should complete step "image_prepare"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_prepare, 'status', 'success'); done(); }); }); it('should fail step "image_build"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_build, 'status', 'failure'); done(); }); }); it('should list other image_execute as queued', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_execute, 'status', 'queued'); done(); }); }); it('should complete step "cleanup"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); it('should have deleted payload file during cleanup', (done) => { let tarballFileName = path.join(config.payload.tarball.tmpdir, job_id + '.tar'); try { fs.lstatSync(tarballFileName); assert.fail(); } catch (error) { assert.include(error.message, 'no such file or directory'); done(); } }); }); describe('EXECUTION step_image_build', () => { let job_id = ''; before(function (done) { let req = createCompendiumPostRequest('./test/erc/step_image_build', cookie_o2r); this.timeout(90000); request(req, (err, res, body) => { let compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should complete all previous steps', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped', 'bag validation should fail with "skipped" because of added metadata files'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); assert.propertyVal(response.steps.image_prepare, 'status', 'success'); done(); }); }); it('should skip steps "generate_configuration" and "generate_manifest"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_configuration, 'status', 'skipped'); assert.propertyVal(response.steps.generate_manifest, 'status', 'skipped'); done(); }); }); it('should complete step "image_build"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_build, 'status', 'success'); done(); }); }); it('should fail step "image_execute" with a status code "1"', (done) => { request(global.test_host + '/api/v1/job/' 
+ job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_execute, 'status', 'failure'); assert.propertyVal(response.steps.image_execute, 'statuscode', 1); done(); }); }); it('should complete step "cleanup"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); it('should have created an image (skipped if images are not kept)', function (done) { if (config.bagtainer.keepImages) { docker.listImages(function (err, images) { assert.ifError(err); let names = new Set(); images.forEach(function (image) { if (image.RepoTags) { image.RepoTags.forEach(function (tag) { names.add(tag); }); } }); assert.include(names, config.bagtainer.imageNamePrefix + job_id); done(); }); } else { this.skip(); } }).timeout(sleepSecs * 1000); }); describe('EXECUTION step_image_execute', () => { let job_id = ''; before(function (done) { this.timeout(90000); let req = createCompendiumPostRequest('./test/erc/step_image_execute', cookie_o2r); request(req, (err, res, body) => { let compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs / 2); done(); }); }); }); }); it('should complete step all previous steps (and skip bag validation)', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); assert.propertyVal(response.steps.image_prepare, 'status', 'success'); assert.propertyVal(response.steps.image_build, 'status', 'success'); assert.propertyVal(response.steps.image_execute, 'status', 'success'); done(); }); }); it('should complete step "image_execute"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); done(); }); }); it('should fail step "check" and have empty images and display properties (depends on https://github.com/o2r-project/erc-checker/issues/8)', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.check, 'status', 'failure'); assert.propertyVal(response.steps.check, 'checkSuccessful', false); assert.property(response.steps.check, 'display'); assert.isNotNull(response.steps.check.display); assert.property(response.steps.check, 'images'); assert.isArray(response.steps.check.images); assert.isEmpty(response.steps.check.images); done(); }); }); it('should have a diff HTML but no images (depends on https://github.com/o2r-project/erc-checker/issues/8)', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.check, 'display'); assert.property(response.steps.check, 'images'); assert.isArray(response.steps.check.images); assert.isEmpty(response.steps.check.images); done(); }); }); it('should have a non-empty errors array', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); 
assert.property(response.steps.check, 'errors'); assert.isArray(response.steps.check.errors); assert.isNotEmpty(response.steps.check.errors); assert.include(JSON.stringify(response.steps.check.errors), 'no such file'); assert.include(JSON.stringify(response.steps.check.errors), 'wrongname.html'); done(); }); }); it('should have step "image_save" queued', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_save, 'status', 'queued'); done(); }); }); it('should complete step "cleanup"', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); it('execution log should include uname output', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.image_execute, 'text'); let uname = unameCall(); log = JSON.stringify(response.steps.image_execute.text); assert.include(log, uname.machine); assert.include(log, uname.release); assert.include(log, uname.sysname); assert.include(log, uname.version); done(); }); }); it('should have deleted container during cleanup (skipped if containers are kept)', function (done) { if (!config.bagtainer.keepContainers) { docker.listContainers({ all: true }, function (err, containers) { containers.forEach(function (containerInfo) { assert.notEqual(containerInfo.Image, config.bagtainer.imageNamePrefix + job_id); }); done(); }); } else { this.skip(); } }); it('should have deleted image during cleanup (skipped if images are kept)', function (done) { if (!config.bagtainer.keepImages) { docker.listImages(function (err, images) { assert.ifError(err); images.forEach(function (image) { let tags = image.RepoTags; tags.forEach(function (tag) { assert.notEqual(tag, config.bagtainer.imageNamePrefix + job_id); }); }); done(); }); } else { this.skip(); } }); it('should have deleted payload file during cleanup', (done) => { let tarballFileName = path.join(config.payload.tarball.tmpdir, job_id + '.tar'); try { fs.lstatSync(tarballFileName); assert.fail(); } catch (error) { assert.include(error.message, 'no such file or directory'); done(); } }); }); describe('EXECUTION step_check', () => { let job_id, compendium_id = ''; before(function (done) { this.timeout(80000); let req = createCompendiumPostRequest('./test/erc/step_check', cookie_o2r); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should complete all other steps (and skip bag validation)', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); assert.propertyVal(response.steps.image_prepare, 'status', 'success'); assert.propertyVal(response.steps.image_build, 'status', 'success'); assert.propertyVal(response.steps.image_execute, 'status', 'success'); assert.propertyVal(response.steps.image_save, 'status', 'success'); assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); it('should complete 
step "check" but not have a display diff nor images', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.check, 'status', 'success'); assert.propertyVal(response.steps.check, 'checkSuccessful', true); assert.property(response.steps.check, 'display'); assert.isNotNull(response.steps.check.display); assert.property(response.steps.check, 'images'); assert.isArray(response.steps.check.images); assert.isEmpty(response.steps.check.images); done(); }); }); it('should have a reference to the image file in step image_save', function (done) { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.image_save, 'file'); assert.propertyVal(response.steps.image_save, 'file', 'image.tar'); done(); }); }); it('should have a text log for image_save', function (done) { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.image_save, 'text'); assert.include(JSON.stringify(response.steps.image_save.text), 'Saved image tarball'); done(); }); }); it('should mention the overwriting of the image tarball when running a second job', function (done) { startJob(compendium_id, id => { job_id = id; sleep.sleep(10); request(global.test_host + '/api/v1/job/' + job_id + '?steps=image_save', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.include(JSON.stringify(response.steps.image_save.text), 'Deleting existing image tarball file'); done(); }); }); }).timeout(20000); }); describe('EXECUTION check with random result in HTML', () => { let job_id = ''; let compendium_id = ''; before(function (done) { this.timeout(90000); let req = createCompendiumPostRequest('./test/workspace/rmd-data-random', cookie_o2r, 'workspace'); request(req, (err, res, body) => { compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); it('should skip validate bag step', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.validate_bag, 'status', 'skipped'); done(); }); }); it('should have same start and end date for skipped step', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.validate_bag, 'start'); assert.property(response.steps.validate_bag, 'end'); assert.equal(response.steps.validate_bag.start, response.steps.validate_bag.end, 'skipped step validate bag has same date for start and end'); done(); }); }); it('should complete generate configuration, validate compendium, image build, image execute, and cleanup', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_configuration, 'status', 'success'); assert.propertyVal(response.steps.validate_compendium, 'status', 'success'); assert.propertyVal(response.steps.image_build, 'status', 'success'); assert.propertyVal(response.steps.image_execute, 'status', 'success'); 
assert.propertyVal(response.steps.cleanup, 'status', 'success'); done(); }); }); it('should complete generate manifest and have the correct manifest file path in the step details', function (done) { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.generate_manifest, 'status', 'success'); assert.property(response.steps.generate_manifest, 'manifest'); assert.propertyVal(response.steps.generate_manifest, 'manifest', 'Dockerfile'); assert.notInclude(response.steps.generate_manifest.manifest, config.fs.base); done(); }); }); it('should fail the step check', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.check, 'status', 'failure'); done(); }); }); it('should skip the step image_save', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.propertyVal(response.steps.image_save, 'status', 'skipped'); done(); }); }); it('should have empty errors array in the step check', function (done) { request(global.test_host + '/api/v1/job/' + job_id + '?steps=check', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.check, 'errors'); assert.isArray(response.steps.check.errors) assert.isEmpty(response.steps.check.errors); done(); }); }); it('should have a reference to a diff file step check', function (done) { request(global.test_host + '/api/v1/job/' + job_id + '?steps=check', (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.property(response.steps.check, 'display'); assert.property(response.steps.check.display, 'diff'); done(); }); }); it('should not have an HTML file in the files list named as the main document (output_file naming works)', function (done) { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); let response = JSON.parse(body); assert.notInclude(JSON.stringify(response.files), 'main.html'); done(); }); }); }); }); describe('API job details filtering', () => { var db = mongojs('localhost/muncher', ['compendia', 'jobs']); var job_id; before(function (done) { this.timeout(90000); db.compendia.drop(function (err, doc) { db.jobs.drop(function (err, doc) { let req = createCompendiumPostRequest('./test/workspace/minimal-rmd-data', cookie_o2r, 'workspace'); request(req, (err, res, body) => { let compendium_id = JSON.parse(body).id; publishCandidate(compendium_id, cookie_o2r, () => { startJob(compendium_id, id => { job_id = id; sleep.sleep(sleepSecs); done(); }); }); }); }); }); }); after(function (done) { db.close(); done(); }); describe('GET /api/v1/job when "steps" is missing', () => { it('should return only status, start and end', (done) => { request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200, 'status code OK'); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'steps'); Object.entries(response.steps).forEach(([step, value], index, array) => { assert.property(value, 'status', step + ' has status'); assert.property(value, 'start', step + ' has start'); assert.property(value, 'end', step + ' has end'); assert.notProperty(value, 'text', step + ' does not have text'); 
assert.notProperty(value, 'statuscode', step + ' does not have statuscode'); assert.notProperty(value, 'images', step + ' does not have images'); assert.notProperty(value, 'manifest', step + ' does not have manifest'); }); done(); }); }); }); describe('GET /api/v1/job when "steps=all"', () => { it('should return all details', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=all', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'steps'); Object.entries(response.steps).forEach(([step, value], index, array) => { assert.property(value, 'status', step + ' has status'); assert.property(value, 'start', step + ' has start'); assert.property(value, 'end'), step + ' has end'; assert.property(value, 'text', step + ' has text'); }); assert.property(response.steps.generate_manifest, 'manifest'); assert.property(response.steps.image_execute, 'statuscode'); assert.property(response.steps.check, 'images'); assert.property(response.steps.check, 'display'); done(); }); }); }); describe('GET /api/v1/job for one selected step', () => { it('should give status, start and end but full details for the step', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=generate_manifest', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'steps'); Object.entries(response.steps).forEach(([step, value], index, array) => { assert.property(value, 'status', step + ' has status'); assert.property(value, 'start', step + ' has start'); assert.property(value, 'end'), step + ' has end'; if (step != 'generate_manifest') assert.notProperty(value, 'text', step + ' does not have text'); }); assert.property(response.steps.generate_manifest, 'manifest'); assert.property(response.steps.generate_manifest, 'text'); done(); }); }); }); describe('GET /api/v1/job with trailing slash and without', () => { it('should just work', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '/?steps=validate_bag', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let responseWith = JSON.parse(body); request(global.test_host + '/api/v1/job/' + job_id + '?steps=validate_bag', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let responseWithout = JSON.parse(body); assert.property(responseWith.steps.validate_bag, 'status'); assert.property(responseWithout.steps.validate_bag, 'status'); assert.property(responseWith.steps.validate_bag, 'text'); assert.property(responseWithout.steps.validate_bag, 'text'); assert.notProperty(responseWith.steps.validate_compendium, 'text'); assert.notProperty(responseWithout.steps.validate_compendium, 'text'); done(); }); }); }); }); describe('GET /api/v1/job with two selected steps', () => { it('should give status, start and end for all steps, but full details for two selected steps', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=check,cleanup', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'steps'); Object.entries(response.steps).forEach(([step, value], index, array) => { 
assert.property(value, 'status', step + ' has status'); assert.property(value, 'start', step + ' has start'); assert.property(value, 'end'), step + ' has end'; if (!['check', 'cleanup'].includes(step)) { assert.notProperty(value, 'text', step + ' does not have text'); } }); assert.property(response.steps.check, 'text'); assert.property(response.steps.cleanup, 'text'); assert.property(response.steps.check, 'images'); done(); }); }); }); describe('GET /api/v1/job with two existing steps and one unknown', () => { it('should give status, start and end for all steps, but full details for two selected steps', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=check,cleanup,oneGiantLeap', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'steps'); Object.entries(response.steps).forEach(([step, value], index, array) => { assert.property(value, 'status', step + ' has status'); assert.property(value, 'start', step + ' has start'); assert.property(value, 'end'), step + ' has end'; if (!['check', 'cleanup'].includes(step)) { assert.notProperty(value, 'text', step + ' does not have text'); } }); assert.property(response.steps.check, 'text'); assert.property(response.steps.cleanup, 'text'); assert.property(response.steps.check, 'images'); done(); }); }); }); describe('GET /api/v1/job with unknown steps parameter', () => { it('should have the default behaviour', (done) => { request(global.test_host + '/api/v1/job/' + job_id + '?steps=none', (err, res, body) => { assert.ifError(err); assert.equal(res.statusCode, 200, 'status code OK'); assert.isObject(JSON.parse(body), 'returned JSON'); let response = JSON.parse(body); assert.property(response, 'steps'); Object.entries(response.steps).forEach(([step, value], index, array) => { assert.property(value, 'status', step + ' has status'); assert.property(value, 'start', step + ' has start'); assert.property(value, 'end'), step + ' has end'; assert.notProperty(value, 'text', step + ' does not have text'); assert.notProperty(value, 'statuscode', step + ' does not have statuscode'); assert.notProperty(value, 'images', step + ' does not have images'); assert.notProperty(value, 'manifest', step + ' does not have manifest'); }); done(); }); }); }); });
add test for image.tar (non)existence
test/job-steps.js
add test for image.tar (non)existence
<ide><path>test/job-steps.js
<ide>       });
<ide>     });
<ide>   }).timeout(20000);
<add>
<add>   it('should list the image tarball in the compendium file listing', function (done) {
<add>     request(global.test_host + '/api/v1/compendium/' + compendium_id, (err, res, body) => {
<add>       assert.ifError(err);
<add>       let response = JSON.parse(body);
<add>
<add>       assert.include(JSON.stringify(response.files), 'image.tar');
<add>       done();
<add>     });
<add>   });
<add>
<add>   it('should not have the image tarball in the job file listing', function (done) {
<add>     request(global.test_host + '/api/v1/job/' + job_id, (err, res, body) => {
<add>       assert.ifError(err);
<add>       let response = JSON.parse(body);
<add>
<add>       assert.notInclude(JSON.stringify(response.files), 'image.tar');
<add>       done();
<add>     });
<add>   });
<ide> });
<ide>
<ide> describe('EXECUTION check with random result in HTML', () => {
Java
apache-2.0
76536ce5c99651ba171971f8739793d3b0060341
0
vt0r/k-9,rishabhbitsg/k-9,ndew623/k-9,ndew623/k-9,mawiegand/k-9,mawiegand/k-9,roscrazy/k-9,philipwhiuk/q-mail,philipwhiuk/k-9,cketti/k-9,G00fY2/k-9_material_design,sedrubal/k-9,philipwhiuk/q-mail,indus1/k-9,jca02266/k-9,dgger/k-9,sedrubal/k-9,vatsalsura/k-9,cketti/k-9,philipwhiuk/q-mail,cketti/k-9,k9mail/k-9,indus1/k-9,vt0r/k-9,jca02266/k-9,cketti/k-9,philipwhiuk/k-9,jca02266/k-9,dgger/k-9,dgger/k-9,CodingRmy/k-9,k9mail/k-9,mawiegand/k-9,k9mail/k-9,ndew623/k-9,vatsalsura/k-9,rishabhbitsg/k-9,roscrazy/k-9,G00fY2/k-9_material_design,CodingRmy/k-9
package com.fsck.k9.mail.store.webdav; import android.util.Log; import com.fsck.k9.mail.*; import com.fsck.k9.mail.filter.Base64; import com.fsck.k9.mail.CertificateValidationException; import com.fsck.k9.mail.store.RemoteStore; import com.fsck.k9.mail.store.StoreConfig; import org.apache.http.*; import org.apache.http.client.CookieStore; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.protocol.ClientContext; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.BasicCookieStore; import org.apache.http.message.BasicNameValuePair; import org.apache.http.protocol.BasicHttpContext; import org.apache.http.protocol.HttpContext; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.XMLReader; import javax.net.ssl.SSLException; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import java.io.*; import java.net.URI; import java.net.URISyntaxException; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.util.*; import static com.fsck.k9.mail.K9MailLib.DEBUG_PROTOCOL_WEBDAV; import static com.fsck.k9.mail.K9MailLib.LOG_TAG; import static com.fsck.k9.mail.helper.UrlEncodingHelper.decodeUtf8; import static com.fsck.k9.mail.helper.UrlEncodingHelper.encodeUtf8; /** * <pre> * Uses WebDAV formatted HTTP calls to an MS Exchange server to fetch email * and email information. * </pre> */ public class WebDavStore extends RemoteStore { /** * Decodes a WebDavStore URI. * <p/> * <p>Possible forms:</p> * <pre> * webdav://user:password@server:port ConnectionSecurity.NONE * webdav+ssl+://user:password@server:port ConnectionSecurity.SSL_TLS_REQUIRED * </pre> */ public static WebDavStoreSettings decodeUri(String uri) { String host; int port; ConnectionSecurity connectionSecurity; String username = null; String password = null; String alias = null; String path = null; String authPath = null; String mailboxPath = null; URI webDavUri; try { webDavUri = new URI(uri); } catch (URISyntaxException use) { throw new IllegalArgumentException("Invalid WebDavStore URI", use); } String scheme = webDavUri.getScheme(); /* * Currently available schemes are: * webdav * webdav+ssl+ * * The following are obsolete schemes that may be found in pre-existing * settings from earlier versions or that may be found when imported. 
We * continue to recognize them and re-map them appropriately: * webdav+tls * webdav+tls+ * webdav+ssl */ if (scheme.equals("webdav")) { connectionSecurity = ConnectionSecurity.NONE; } else if (scheme.startsWith("webdav+")) { connectionSecurity = ConnectionSecurity.SSL_TLS_REQUIRED; } else { throw new IllegalArgumentException("Unsupported protocol (" + scheme + ")"); } host = webDavUri.getHost(); if (host.startsWith("http")) { String[] hostParts = host.split("://", 2); if (hostParts.length > 1) { host = hostParts[1]; } } port = webDavUri.getPort(); String userInfo = webDavUri.getUserInfo(); if (userInfo != null) { String[] userInfoParts = userInfo.split(":"); username = decodeUtf8(userInfoParts[0]); String userParts[] = username.split("\\\\", 2); if (userParts.length > 1) { alias = userParts[1]; } else { alias = username; } if (userInfoParts.length > 1) { password = decodeUtf8(userInfoParts[1]); } } String[] pathParts = webDavUri.getPath().split("\\|"); for (int i = 0, count = pathParts.length; i < count; i++) { if (i == 0) { if (pathParts[0] != null && pathParts[0].length() > 1) { path = pathParts[0]; } } else if (i == 1) { if (pathParts[1] != null && pathParts[1].length() > 1) { authPath = pathParts[1]; } } else if (i == 2) { if (pathParts[2] != null && pathParts[2].length() > 1) { mailboxPath = pathParts[2]; } } } return new WebDavStoreSettings(host, port, connectionSecurity, null, username, password, null, alias, path, authPath, mailboxPath); } /** * Creates a WebDavStore URI with the supplied settings. * * @param server The {@link ServerSettings} object that holds the server settings. * @return A WebDavStore URI that holds the same information as the {@code server} parameter. * @see StoreConfig#getStoreUri() * @see WebDavStore#decodeUri(String) */ public static String createUri(ServerSettings server) { String userEnc = encodeUtf8(server.username); String passwordEnc = (server.password != null) ? encodeUtf8(server.password) : ""; String scheme; switch (server.connectionSecurity) { case SSL_TLS_REQUIRED: scheme = "webdav+ssl+"; break; default: case NONE: scheme = "webdav"; break; } String userInfo = userEnc + ":" + passwordEnc; String uriPath; Map<String, String> extra = server.getExtra(); if (extra != null) { String path = extra.get(WebDavStoreSettings.PATH_KEY); path = (path != null) ? path : ""; String authPath = extra.get(WebDavStoreSettings.AUTH_PATH_KEY); authPath = (authPath != null) ? authPath : ""; String mailboxPath = extra.get(WebDavStoreSettings.MAILBOX_PATH_KEY); mailboxPath = (mailboxPath != null) ? 
mailboxPath : ""; uriPath = "/" + path + "|" + authPath + "|" + mailboxPath; } else { uriPath = "/||"; } try { return new URI(scheme, userInfo, server.host, server.port, uriPath, null, null).toString(); } catch (URISyntaxException e) { throw new IllegalArgumentException("Can't create WebDavStore URI", e); } } private ConnectionSecurity mConnectionSecurity; private String mUsername; /* Stores the username for authentications */ private String mAlias; /* Stores the alias for the user's mailbox */ private String mPassword; /* Stores the password for authentications */ private String mUrl; /* Stores the base URL for the server */ private String mHost; /* Stores the host name for the server */ private int mPort; private String mPath; /* Stores the path for the server */ private String mAuthPath; /* Stores the path off of the server to post data to for form based authentication */ private String mMailboxPath; /* Stores the user specified path to the mailbox */ private final WebDavHttpClient.WebDavHttpClientFactory mHttpClientFactory; private WebDavHttpClient mHttpClient = null; private HttpContext mContext = null; private String mAuthString; private CookieStore mAuthCookies = null; private short mAuthentication = WebDavConstants.AUTH_TYPE_NONE; private String mCachedLoginUrl; private Folder mSendFolder = null; private Map<String, WebDavFolder> mFolderList = new HashMap<String, WebDavFolder>(); public WebDavStore(StoreConfig storeConfig, WebDavHttpClient.WebDavHttpClientFactory clientFactory) throws MessagingException { super(storeConfig, null); mHttpClientFactory = clientFactory; WebDavStoreSettings settings; try { settings = WebDavStore.decodeUri(storeConfig.getStoreUri()); } catch (IllegalArgumentException e) { throw new MessagingException("Error while decoding store URI", e); } mHost = settings.host; mPort = settings.port; mConnectionSecurity = settings.connectionSecurity; mUsername = settings.username; mPassword = settings.password; mAlias = settings.alias; mPath = settings.path; mAuthPath = settings.authPath; mMailboxPath = settings.mailboxPath; if (mPath == null || mPath.equals("")) { mPath = "/Exchange"; } else if (!mPath.startsWith("/")) { mPath = "/" + mPath; } if (mMailboxPath == null || mMailboxPath.equals("")) { mMailboxPath = "/" + mAlias; } else if (!mMailboxPath.startsWith("/")) { mMailboxPath = "/" + mMailboxPath; } if (mAuthPath != null && !mAuthPath.equals("") && !mAuthPath.startsWith("/")) { mAuthPath = "/" + mAuthPath; } // The URL typically looks like the following: "https://mail.domain.com/Exchange/alias". // The inbox path would look like: "https://mail.domain.com/Exchange/alias/Inbox". mUrl = getRoot() + mPath + mMailboxPath; mAuthString = "Basic " + Base64.encode(mUsername + ":" + mPassword); } private String getRoot() { String root; if (mConnectionSecurity == ConnectionSecurity.SSL_TLS_REQUIRED) { root = "https"; } else { root = "http"; } root += "://" + mHost + ":" + mPort; return root; } HttpContext getContext() { return mContext; } short getAuthentication() { return mAuthentication; } StoreConfig getStoreConfig() { return mStoreConfig; } @Override public void checkSettings() throws MessagingException { authenticate(); } @Override public List<? 
extends Folder> getPersonalNamespaces(boolean forceListAll) throws MessagingException { List<Folder> folderList = new LinkedList<Folder>(); /** * We have to check authentication here so we have the proper URL stored */ getHttpClient(); /** * Firstly we get the "special" folders list (inbox, outbox, etc) * and setup the account accordingly */ Map<String, String> headers = new HashMap<String, String>(); headers.put("Depth", "0"); headers.put("Brief", "t"); DataSet dataset = processRequest(this.mUrl, "PROPFIND", getSpecialFoldersList(), headers); Map<String, String> specialFoldersMap = dataset.getSpecialFolderToUrl(); String folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_INBOX_FOLDER)); if (folderName != null) { mStoreConfig.setAutoExpandFolderName(folderName); mStoreConfig.setInboxFolderName(folderName); } folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_DRAFTS_FOLDER)); if (folderName != null) mStoreConfig.setDraftsFolderName(folderName); folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_TRASH_FOLDER)); if (folderName != null) mStoreConfig.setTrashFolderName(folderName); folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_SPAM_FOLDER)); if (folderName != null) mStoreConfig.setSpamFolderName(folderName); // K-9 Mail's outbox is a special local folder and different from Exchange/WebDAV's outbox. /* folderName = getFolderName(specialFoldersMap.get(DAV_MAIL_OUTBOX_FOLDER)); if (folderName != null) mAccount.setOutboxFolderName(folderName); */ folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_SENT_FOLDER)); if (folderName != null) mStoreConfig.setSentFolderName(folderName); /** * Next we get all the folders (including "special" ones) */ headers = new HashMap<String, String>(); headers.put("Brief", "t"); dataset = processRequest(this.mUrl, "SEARCH", getFolderListXml(), headers); String[] folderUrls = dataset.getHrefs(); for (String tempUrl : folderUrls) { WebDavFolder folder = createFolder(tempUrl); if (folder != null) folderList.add(folder); } return folderList; } /** * Creates a folder using the URL passed as parameter (only if it has not been * already created) and adds this to our store folder map. * * @param folderUrl * @return */ private WebDavFolder createFolder(String folderUrl) { if (folderUrl == null) return null; WebDavFolder wdFolder = null; String folderName = getFolderName(folderUrl); if (folderName != null) { wdFolder = getFolder(folderName); if (wdFolder != null) { wdFolder.setUrl(folderUrl); } } // else: Unknown URL format => NO Folder created return wdFolder; } private String getFolderName(String folderUrl) { if (folderUrl == null) return null; // Here we extract the folder name starting from the complete url. // folderUrl is in the form http://mail.domain.com/exchange/username/foldername // so we need "foldername" which is the string after the fifth slash int folderSlash = -1; for (int j = 0; j < 5; j++) { folderSlash = folderUrl.indexOf('/', folderSlash + 1); if (folderSlash < 0) break; } if (folderSlash > 0) { String fullPathName; // Removes the final slash if present if (folderUrl.charAt(folderUrl.length() - 1) == '/') fullPathName = folderUrl.substring(folderSlash + 1, folderUrl.length() - 1); else fullPathName = folderUrl.substring(folderSlash + 1); // Decodes the url-encoded folder name (i.e. 
"My%20folder" => "My Folder" return decodeUtf8(fullPathName); } return null; } @Override public WebDavFolder getFolder(String name) { WebDavFolder folder; if ((folder = this.mFolderList.get(name)) == null) { folder = new WebDavFolder(this, name); mFolderList.put(name, folder); } return folder; } public Folder getSendSpoolFolder() throws MessagingException { if (mSendFolder == null) mSendFolder = getFolder(WebDavConstants.DAV_MAIL_SEND_FOLDER); return mSendFolder; } @Override public boolean isMoveCapable() { return true; } @Override public boolean isCopyCapable() { return true; } private String getSpecialFoldersList() { StringBuilder builder = new StringBuilder(200); builder.append("<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>"); builder.append("<propfind xmlns=\"DAV:\">"); builder.append("<prop>"); builder.append("<").append(WebDavConstants.DAV_MAIL_INBOX_FOLDER).append(" xmlns=\"urn:schemas:httpmail:\"/>"); builder.append("<").append(WebDavConstants.DAV_MAIL_DRAFTS_FOLDER).append(" xmlns=\"urn:schemas:httpmail:\"/>"); builder.append("<").append(WebDavConstants.DAV_MAIL_OUTBOX_FOLDER).append(" xmlns=\"urn:schemas:httpmail:\"/>"); builder.append("<").append(WebDavConstants.DAV_MAIL_SENT_FOLDER).append(" xmlns=\"urn:schemas:httpmail:\"/>"); builder.append("<").append(WebDavConstants.DAV_MAIL_TRASH_FOLDER).append(" xmlns=\"urn:schemas:httpmail:\"/>"); // This should always be ##DavMailSubmissionURI## for which we already have a constant // buffer.append("<sendmsg xmlns=\"urn:schemas:httpmail:\"/>"); builder.append("<").append(WebDavConstants.DAV_MAIL_SPAM_FOLDER).append(" xmlns=\"urn:schemas:httpmail:\"/>"); builder.append("</prop>"); builder.append("</propfind>"); return builder.toString(); } /*************************************************************** * WebDAV XML Request body retrieval functions */ private String getFolderListXml() { StringBuilder builder = new StringBuilder(200); builder.append("<?xml version='1.0' ?>"); builder.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); builder.append("SELECT \"DAV:uid\", \"DAV:ishidden\"\r\n"); builder.append(" FROM SCOPE('deep traversal of \"").append(this.mUrl).append("\"')\r\n"); builder.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=True\r\n"); builder.append("</a:sql></a:searchrequest>\r\n"); return builder.toString(); } String getMessageCountXml(String messageState) { StringBuilder builder = new StringBuilder(200); builder.append("<?xml version='1.0' ?>"); builder.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); builder.append("SELECT \"DAV:visiblecount\"\r\n"); builder.append(" FROM \"\"\r\n"); builder.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False AND \"urn:schemas:httpmail:read\"=") .append(messageState).append("\r\n"); builder.append(" GROUP BY \"DAV:ishidden\"\r\n"); builder.append("</a:sql></a:searchrequest>\r\n"); return builder.toString(); } String getMessageEnvelopeXml(String[] uids) { StringBuilder buffer = new StringBuilder(200); buffer.append("<?xml version='1.0' ?>"); buffer.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); buffer.append("SELECT \"DAV:uid\", \"DAV:getcontentlength\","); buffer.append(" \"urn:schemas:mailheader:mime-version\","); buffer.append(" \"urn:schemas:mailheader:content-type\","); buffer.append(" \"urn:schemas:mailheader:subject\","); buffer.append(" \"urn:schemas:mailheader:date\","); buffer.append(" \"urn:schemas:mailheader:thread-topic\","); buffer.append(" \"urn:schemas:mailheader:thread-index\","); buffer.append(" 
\"urn:schemas:mailheader:from\","); buffer.append(" \"urn:schemas:mailheader:to\","); buffer.append(" \"urn:schemas:mailheader:in-reply-to\","); buffer.append(" \"urn:schemas:mailheader:cc\","); buffer.append(" \"urn:schemas:httpmail:read\""); buffer.append(" \r\n"); buffer.append(" FROM \"\"\r\n"); buffer.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False AND "); for (int i = 0, count = uids.length; i < count; i++) { if (i != 0) { buffer.append(" OR "); } buffer.append(" \"DAV:uid\"='").append(uids[i]).append("' "); } buffer.append("\r\n"); buffer.append("</a:sql></a:searchrequest>\r\n"); return buffer.toString(); } String getMessagesXml() { StringBuilder builder = new StringBuilder(200); builder.append("<?xml version='1.0' ?>"); builder.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); builder.append("SELECT \"DAV:uid\"\r\n"); builder.append(" FROM \"\"\r\n"); builder.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False\r\n"); builder.append("</a:sql></a:searchrequest>\r\n"); return builder.toString(); } String getMessageUrlsXml(String[] uids) { StringBuilder buffer = new StringBuilder(600); buffer.append("<?xml version='1.0' ?>"); buffer.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); buffer.append("SELECT \"urn:schemas:httpmail:read\", \"DAV:uid\"\r\n"); buffer.append(" FROM \"\"\r\n"); buffer.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False AND "); for (int i = 0, count = uids.length; i < count; i++) { if (i != 0) { buffer.append(" OR "); } buffer.append(" \"DAV:uid\"='").append(uids[i]).append("' "); } buffer.append("\r\n"); buffer.append("</a:sql></a:searchrequest>\r\n"); return buffer.toString(); } String getMessageFlagsXml(String[] uids) throws MessagingException { if (uids.length == 0) { throw new MessagingException("Attempt to get flags on 0 length array for uids"); } StringBuilder buffer = new StringBuilder(200); buffer.append("<?xml version='1.0' ?>"); buffer.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); buffer.append("SELECT \"urn:schemas:httpmail:read\", \"DAV:uid\"\r\n"); buffer.append(" FROM \"\"\r\n"); buffer.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False AND "); for (int i = 0, count = uids.length; i < count; i++) { if (i != 0) { buffer.append(" OR "); } buffer.append(" \"DAV:uid\"='").append(uids[i]).append("' "); } buffer.append("\r\n"); buffer.append("</a:sql></a:searchrequest>\r\n"); return buffer.toString(); } String getMarkMessagesReadXml(String[] urls, boolean read) { StringBuilder buffer = new StringBuilder(600); buffer.append("<?xml version='1.0' ?>\r\n"); buffer.append("<a:propertyupdate xmlns:a='DAV:' xmlns:b='urn:schemas:httpmail:'>\r\n"); buffer.append("<a:target>\r\n"); for (String url : urls) { buffer.append(" <a:href>").append(url).append("</a:href>\r\n"); } buffer.append("</a:target>\r\n"); buffer.append("<a:set>\r\n"); buffer.append(" <a:prop>\r\n"); buffer.append(" <b:read>").append(read ? "1" : "0").append("</b:read>\r\n"); buffer.append(" </a:prop>\r\n"); buffer.append("</a:set>\r\n"); buffer.append("</a:propertyupdate>\r\n"); return buffer.toString(); } // For flag: // http://www.devnewsgroups.net/group/microsoft.public.exchange.development/topic27175.aspx // "<m:0x10900003>1</m:0x10900003>" & _ String getMoveOrCopyMessagesReadXml(String[] urls, boolean isMove) { String action = (isMove ? 
"move" : "copy"); StringBuilder buffer = new StringBuilder(600); buffer.append("<?xml version='1.0' ?>\r\n"); buffer.append("<a:").append(action).append(" xmlns:a='DAV:' xmlns:b='urn:schemas:httpmail:'>\r\n"); buffer.append("<a:target>\r\n"); for (String url : urls) { buffer.append(" <a:href>").append(url).append("</a:href>\r\n"); } buffer.append("</a:target>\r\n"); buffer.append("</a:").append(action).append(">\r\n"); return buffer.toString(); } /*************************************************************** * Authentication related methods */ /** * Determines which type of authentication Exchange is using and authenticates appropriately. * * @throws MessagingException */ public boolean authenticate() throws MessagingException { try { if (mAuthentication == WebDavConstants.AUTH_TYPE_NONE) { ConnectionInfo info = doInitialConnection(); if (info.requiredAuthType == WebDavConstants.AUTH_TYPE_BASIC) { HttpGeneric request = new HttpGeneric(mUrl); request.setMethod("GET"); request.setHeader("Authorization", mAuthString); WebDavHttpClient httpClient = getHttpClient(); HttpResponse response = httpClient.executeOverride(request, mContext); int statusCode = response.getStatusLine().getStatusCode(); if (statusCode >= 200 && statusCode < 300) { mAuthentication = WebDavConstants.AUTH_TYPE_BASIC; } else if (statusCode == 401) { throw new MessagingException("Invalid username or password for authentication."); } else { throw new MessagingException("Error with code " + response.getStatusLine().getStatusCode() + " during request processing: " + response.getStatusLine().toString()); } } else if (info.requiredAuthType == WebDavConstants.AUTH_TYPE_FORM_BASED) { doFBA(info); } } else if (mAuthentication == WebDavConstants.AUTH_TYPE_BASIC) { // Nothing to do, we authenticate with every request when // using basic authentication. } else if (mAuthentication == WebDavConstants.AUTH_TYPE_FORM_BASED) { // Our cookie expired, re-authenticate. doFBA(null); } } catch (IOException ioe) { Log.e(LOG_TAG, "Error during authentication: " + ioe + "\nStack: " + WebDavUtils.processException(ioe)); throw new MessagingException("Error during authentication", ioe); } return mAuthentication != WebDavConstants.AUTH_TYPE_NONE; } /** * Makes the initial connection to Exchange for authentication. Determines the type of authentication necessary for * the server. * * @throws MessagingException */ private ConnectionInfo doInitialConnection() throws MessagingException { // For our initial connection we are sending an empty GET request to // the configured URL, which should be in the following form: // https://mail.server.com/Exchange/alias // // Possible status codes include: // 401 - the server uses basic authentication // 30x - the server is trying to redirect us to an OWA login // 20x - success // // The latter two indicate form-based authentication. ConnectionInfo info = new ConnectionInfo(); WebDavHttpClient httpClient = getHttpClient(); HttpGeneric request = new HttpGeneric(mUrl); request.setMethod("GET"); try { HttpResponse response = httpClient.executeOverride(request, mContext); info.statusCode = response.getStatusLine().getStatusCode(); if (info.statusCode == 401) { // 401 is the "Unauthorized" status code, meaning the server wants // an authentication header for basic authentication. 
info.requiredAuthType = WebDavConstants.AUTH_TYPE_BASIC; } else if ((info.statusCode >= 200 && info.statusCode < 300) || // Success (info.statusCode >= 300 && info.statusCode < 400) || // Redirect (info.statusCode == 440)) { // Unauthorized // We will handle all 3 situations the same. First we take an educated // guess at where the authorization DLL is located. If this is this // doesn't work, then we'll use the redirection URL for OWA login given // to us by exchange. We can use this to scrape the location of the // authorization URL. info.requiredAuthType = WebDavConstants.AUTH_TYPE_FORM_BASED; if (mAuthPath != null && !mAuthPath.equals("")) { // The user specified their own authentication path, use that. info.guessedAuthUrl = getRoot() + mAuthPath; } else { // Use the default path to the authentication dll. info.guessedAuthUrl = getRoot() + "/exchweb/bin/auth/owaauth.dll"; } // Determine where the server is trying to redirect us. Header location = response.getFirstHeader("Location"); if (location != null) { info.redirectUrl = location.getValue(); } } else { throw new IOException("Error with code " + info.statusCode + " during request processing: " + response.getStatusLine().toString()); } } catch (SSLException e) { throw new CertificateValidationException(e.getMessage(), e); } catch (IOException ioe) { Log.e(LOG_TAG, "IOException: " + ioe + "\nTrace: " + WebDavUtils.processException(ioe)); throw new MessagingException("IOException", ioe); } return info; } /** * Performs form-based authentication. * * @throws MessagingException */ public void doFBA(ConnectionInfo info) throws IOException, MessagingException { // Clear out cookies from any previous authentication. if (mAuthCookies != null) mAuthCookies.clear(); WebDavHttpClient httpClient = getHttpClient(); String loginUrl; if (info != null) { loginUrl = info.guessedAuthUrl; } else if (mCachedLoginUrl != null && !mCachedLoginUrl.equals("")) { loginUrl = mCachedLoginUrl; } else { throw new MessagingException("No valid login URL available for form-based authentication."); } HttpGeneric request = new HttpGeneric(loginUrl); request.setMethod("POST"); // Build the POST data. List<BasicNameValuePair> pairs = new ArrayList<BasicNameValuePair>(); pairs.add(new BasicNameValuePair("destination", mUrl)); pairs.add(new BasicNameValuePair("username", mUsername)); pairs.add(new BasicNameValuePair("password", mPassword)); pairs.add(new BasicNameValuePair("flags", "0")); pairs.add(new BasicNameValuePair("SubmitCreds", "Log+On")); pairs.add(new BasicNameValuePair("forcedownlevel", "0")); pairs.add(new BasicNameValuePair("trusted", "0")); UrlEncodedFormEntity formEntity = new UrlEncodedFormEntity(pairs); request.setEntity(formEntity); HttpResponse response = httpClient.executeOverride(request, mContext); boolean authenticated = testAuthenticationResponse(response); if (!authenticated) { // Check the response from the authentication request above for a form action. String formAction = findFormAction(WebDavHttpClient.getUngzippedContent(response.getEntity())); if (formAction == null) { // If there is no form action, try using our redirect URL from the initial connection. 
if (info != null && info.redirectUrl != null && !info.redirectUrl.equals("")) { loginUrl = info.redirectUrl; request = new HttpGeneric(loginUrl); request.setMethod("GET"); response = httpClient.executeOverride(request, mContext); formAction = findFormAction(WebDavHttpClient.getUngzippedContent(response.getEntity())); } } if (formAction != null) { try { URI formActionUri = new URI(formAction); URI loginUri = new URI(loginUrl); if (formActionUri.isAbsolute()) { // The form action is an absolute URL, just use it. loginUrl = formAction; } else { // Append the form action to our current URL, minus the file name. String urlPath; if (formAction.startsWith("/")) { urlPath = formAction; } else { urlPath = loginUri.getPath(); int lastPathPos = urlPath.lastIndexOf('/'); if (lastPathPos > -1) { urlPath = urlPath.substring(0, lastPathPos + 1); urlPath = urlPath.concat(formAction); } } // Reconstruct the login URL based on the original login URL and the form action. URI finalUri = new URI(loginUri.getScheme(), loginUri.getUserInfo(), loginUri.getHost(), loginUri.getPort(), urlPath, null, null); loginUrl = finalUri.toString(); } // Retry the login using our new URL. request = new HttpGeneric(loginUrl); request.setMethod("POST"); request.setEntity(formEntity); response = httpClient.executeOverride(request, mContext); authenticated = testAuthenticationResponse(response); } catch (URISyntaxException e) { Log.e(LOG_TAG, "URISyntaxException caught " + e + "\nTrace: " + WebDavUtils.processException(e)); throw new MessagingException("URISyntaxException caught", e); } } else { throw new MessagingException("A valid URL for Exchange authentication could not be found."); } } if (authenticated) { mAuthentication = WebDavConstants.AUTH_TYPE_FORM_BASED; mCachedLoginUrl = loginUrl; } else { throw new MessagingException("Invalid credentials provided for authentication."); } } /** * Searches the specified stream for an HTML form and returns the form's action target. * * @throws IOException */ private String findFormAction(InputStream istream) throws IOException { String formAction = null; BufferedReader reader = new BufferedReader(new InputStreamReader(istream), 4096); String tempText; // Read line by line until we find something like: <form action="owaauth.dll"...>. while ((tempText = reader.readLine()) != null && formAction == null) { if (tempText.contains(" action=")) { String[] actionParts = tempText.split(" action="); if (actionParts.length > 1 && actionParts[1].length() > 1) { char openQuote = actionParts[1].charAt(0); int closePos = actionParts[1].indexOf(openQuote, 1); if (closePos > 1) { formAction = actionParts[1].substring(1, closePos); // Remove any GET parameters. int quesPos = formAction.indexOf('?'); if (quesPos != -1) { formAction = formAction.substring(0, quesPos); } } } } } return formAction; } private boolean testAuthenticationResponse(HttpResponse response) throws MessagingException { boolean authenticated = false; int statusCode = response.getStatusLine().getStatusCode(); // Exchange 2007 will return a 302 status code no matter what. if (((statusCode >= 200 && statusCode < 300) || statusCode == 302) && mAuthCookies != null && !mAuthCookies.getCookies().isEmpty()) { // We may be authenticated, we need to send a test request to know for sure. // Exchange 2007 adds the same cookies whether the username and password were valid or not. 
ConnectionInfo info = doInitialConnection(); if (info.statusCode >= 200 && info.statusCode < 300) { authenticated = true; } else if (info.statusCode == 302) { // If we are successfully authenticated, Exchange will try to redirect us to our OWA inbox. // Otherwise, it will redirect us to a logon page. // Our URL is in the form: https://hostname:port/Exchange/alias. // The redirect is in the form: https://hostname:port/owa/alias. // Do a simple replace and compare the resulting strings. try { String thisPath = new URI(mUrl).getPath(); String redirectPath = new URI(info.redirectUrl).getPath(); if (!thisPath.endsWith("/")) { thisPath = thisPath.concat("/"); } if (!redirectPath.endsWith("/")) { redirectPath = redirectPath.concat("/"); } if (redirectPath.equalsIgnoreCase(thisPath)) { authenticated = true; } else { int found = thisPath.indexOf('/', 1); if (found != -1) { String replace = thisPath.substring(0, found + 1); redirectPath = redirectPath.replace("/owa/", replace); if (redirectPath.equalsIgnoreCase(thisPath)) { authenticated = true; } } } } catch (URISyntaxException e) { Log.e(LOG_TAG, "URISyntaxException caught " + e + "\nTrace: " + WebDavUtils.processException(e)); throw new MessagingException("URISyntaxException caught", e); } } } return authenticated; } public CookieStore getAuthCookies() { return mAuthCookies; } public String getAlias() { return mAlias; } public String getUrl() { return mUrl; } public WebDavHttpClient getHttpClient() throws MessagingException { if (mHttpClient == null) { mHttpClient = mHttpClientFactory.create(); // Disable automatic redirects on the http client. mHttpClient.getParams().setBooleanParameter("http.protocol.handle-redirects", false); // Setup a cookie store for forms-based authentication. mContext = new BasicHttpContext(); mAuthCookies = new BasicCookieStore(); mContext.setAttribute(ClientContext.COOKIE_STORE, mAuthCookies); SchemeRegistry reg = mHttpClient.getConnectionManager().getSchemeRegistry(); try { Scheme s = new Scheme("https", new WebDavSocketFactory(mHost, 443), 443); reg.register(s); } catch (NoSuchAlgorithmException nsa) { Log.e(LOG_TAG, "NoSuchAlgorithmException in getHttpClient: " + nsa); throw new MessagingException("NoSuchAlgorithmException in getHttpClient: " + nsa); } catch (KeyManagementException kme) { Log.e(LOG_TAG, "KeyManagementException in getHttpClient: " + kme); throw new MessagingException("KeyManagementException in getHttpClient: " + kme); } } return mHttpClient; } private InputStream sendRequest(String url, String method, StringEntity messageBody, Map<String, String> headers, boolean tryAuth) throws MessagingException { if (url == null || method == null) { return null; } WebDavHttpClient httpClient = getHttpClient(); try { int statusCode; HttpGeneric httpMethod = new HttpGeneric(url); HttpResponse response; HttpEntity entity; if (messageBody != null) { httpMethod.setEntity(messageBody); } if (headers != null) { for (Map.Entry<String, String> entry : headers.entrySet()) { httpMethod.setHeader(entry.getKey(), entry.getValue()); } } if (mAuthentication == WebDavConstants.AUTH_TYPE_NONE) { if (!tryAuth || !authenticate()) { throw new MessagingException("Unable to authenticate in sendRequest()."); } } else if (mAuthentication == WebDavConstants.AUTH_TYPE_BASIC) { httpMethod.setHeader("Authorization", mAuthString); } httpMethod.setMethod(method); response = httpClient.executeOverride(httpMethod, mContext); statusCode = response.getStatusLine().getStatusCode(); entity = response.getEntity(); if (statusCode == 401) { throw new 
MessagingException("Invalid username or password for Basic authentication."); } else if (statusCode == 440) { if (tryAuth && mAuthentication == WebDavConstants.AUTH_TYPE_FORM_BASED) { // Our cookie expired, re-authenticate. doFBA(null); sendRequest(url, method, messageBody, headers, false); } else { throw new MessagingException("Authentication failure in sendRequest()."); } } else if (statusCode == 302) { handleUnexpectedRedirect(response, url); } else if (statusCode < 200 || statusCode >= 300) { throw new IOException("Error with code " + statusCode + " during request processing: " + response.getStatusLine().toString()); } if (entity != null) { return WebDavHttpClient.getUngzippedContent(entity); } } catch (UnsupportedEncodingException uee) { Log.e(LOG_TAG, "UnsupportedEncodingException: " + uee + "\nTrace: " + WebDavUtils.processException(uee)); throw new MessagingException("UnsupportedEncodingException", uee); } catch (IOException ioe) { Log.e(LOG_TAG, "IOException: " + ioe + "\nTrace: " + WebDavUtils.processException(ioe)); throw new MessagingException("IOException", ioe); } return null; } private void handleUnexpectedRedirect(HttpResponse response, String url) throws IOException { if (response.getFirstHeader("Location") != null) { // TODO: This may indicate lack of authentication or may alternatively be something we should follow throw new IOException("Unexpected redirect during request processing. " + "Expected response from: "+url+" but told to redirect to:" + response.getFirstHeader("Location").getValue()); } else { throw new IOException("Unexpected redirect during request processing. " + "Expected response from: " + url + " but not told where to redirect to"); } } public String getAuthString() { return mAuthString; } /** * Performs an httprequest to the supplied url using the supplied method. messageBody and headers are optional as * not all requests will need them. There are two signatures to support calls that don't require parsing of the * response. 
*/ DataSet processRequest(String url, String method, String messageBody, Map<String, String> headers) throws MessagingException { return processRequest(url, method, messageBody, headers, true); } DataSet processRequest(String url, String method, String messageBody, Map<String, String> headers, boolean needsParsing) throws MessagingException { DataSet dataset = new DataSet(); if (K9MailLib.isDebug() && DEBUG_PROTOCOL_WEBDAV) { Log.v(LOG_TAG, "processRequest url = '" + url + "', method = '" + method + "', messageBody = '" + messageBody + "'"); } if (url == null || method == null) { return dataset; } getHttpClient(); try { StringEntity messageEntity = null; if (messageBody != null) { messageEntity = new StringEntity(messageBody); messageEntity.setContentType("text/xml"); } InputStream istream = sendRequest(url, method, messageEntity, headers, true); if (istream != null && needsParsing) { try { SAXParserFactory spf = SAXParserFactory.newInstance(); spf.setNamespaceAware(true); //This should be a no-op on Android, but makes the tests work SAXParser sp = spf.newSAXParser(); XMLReader xr = sp.getXMLReader(); WebDavHandler myHandler = new WebDavHandler(); xr.setContentHandler(myHandler); xr.parse(new InputSource(istream)); dataset = myHandler.getDataSet(); } catch (SAXException se) { Log.e(LOG_TAG, "SAXException in processRequest() " + se + "\nTrace: " + WebDavUtils.processException(se)); throw new MessagingException("SAXException in processRequest() ", se); } catch (ParserConfigurationException pce) { Log.e(LOG_TAG, "ParserConfigurationException in processRequest() " + pce + "\nTrace: " + WebDavUtils.processException(pce)); throw new MessagingException("ParserConfigurationException in processRequest() ", pce); } istream.close(); } } catch (UnsupportedEncodingException uee) { Log.e(LOG_TAG, "UnsupportedEncodingException: " + uee + "\nTrace: " + WebDavUtils.processException(uee)); throw new MessagingException("UnsupportedEncodingException in processRequest() ", uee); } catch (IOException ioe) { Log.e(LOG_TAG, "IOException: " + ioe + "\nTrace: " + WebDavUtils.processException(ioe)); throw new MessagingException("IOException in processRequest() ", ioe); } return dataset; } @Override public boolean isSendCapable() { return true; } @Override public void sendMessages(List<? extends Message> messages) throws MessagingException { WebDavFolder tmpFolder = (WebDavFolder) getFolder(mStoreConfig.getDraftsFolderName()); try { tmpFolder.open(Folder.OPEN_MODE_RW); List<? extends Message> retMessages = tmpFolder.appendWebDavMessages(messages); tmpFolder.moveMessages(retMessages, getSendSpoolFolder()); } finally { if (tmpFolder != null) { tmpFolder.close(); } } } }
k9mail-library/src/main/java/com/fsck/k9/mail/store/webdav/WebDavStore.java
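As a reading aid (not part of the commit above): the decodeUri()/createUri() pair in WebDavStore round-trips server settings through the webdav:// URI forms described in its Javadoc. The following is a minimal sketch, assuming only that the k9mail-library classes above are on the classpath; the class name, host and credentials are made-up values.

// WebDavUriExample.java -- illustrative only; values are hypothetical.
import com.fsck.k9.mail.store.webdav.WebDavStore;
import com.fsck.k9.mail.store.webdav.WebDavStoreSettings;

public class WebDavUriExample {
    public static void main(String[] args) {
        // "webdav" maps to ConnectionSecurity.NONE, "webdav+ssl+" to SSL_TLS_REQUIRED.
        WebDavStoreSettings settings =
                WebDavStore.decodeUri("webdav://user:secret@mail.example.com:80");

        System.out.println(settings.host);               // mail.example.com
        System.out.println(settings.connectionSecurity); // NONE
        System.out.println(settings.alias);              // "user" (no "domain\\user" form given)

        // createUri() rebuilds the URI, appending the "path|authPath|mailboxPath" extras.
        System.out.println(WebDavStore.createUri(settings));
    }
}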
package com.fsck.k9.mail.store.webdav; import android.util.Log; import com.fsck.k9.mail.*; import com.fsck.k9.mail.filter.Base64; import com.fsck.k9.mail.CertificateValidationException; import com.fsck.k9.mail.store.RemoteStore; import com.fsck.k9.mail.store.StoreConfig; import org.apache.commons.io.IOUtils; import org.apache.http.*; import org.apache.http.client.CookieStore; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.protocol.ClientContext; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.BasicCookieStore; import org.apache.http.message.BasicNameValuePair; import org.apache.http.protocol.BasicHttpContext; import org.apache.http.protocol.HttpContext; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.XMLReader; import javax.net.ssl.SSLException; import javax.xml.parsers.ParserConfigurationException; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import java.io.*; import java.net.URI; import java.net.URISyntaxException; import java.security.KeyManagementException; import java.security.NoSuchAlgorithmException; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.*; import java.util.zip.GZIPInputStream; import static com.fsck.k9.mail.K9MailLib.DEBUG_PROTOCOL_WEBDAV; import static com.fsck.k9.mail.K9MailLib.LOG_TAG; import static com.fsck.k9.mail.helper.UrlEncodingHelper.decodeUtf8; import static com.fsck.k9.mail.helper.UrlEncodingHelper.encodeUtf8; /** * <pre> * Uses WebDAV formatted HTTP calls to an MS Exchange server to fetch email * and email information. * </pre> */ public class WebDavStore extends RemoteStore { /** * Decodes a WebDavStore URI. * <p/> * <p>Possible forms:</p> * <pre> * webdav://user:password@server:port ConnectionSecurity.NONE * webdav+ssl+://user:password@server:port ConnectionSecurity.SSL_TLS_REQUIRED * </pre> */ public static WebDavStoreSettings decodeUri(String uri) { String host; int port; ConnectionSecurity connectionSecurity; String username = null; String password = null; String alias = null; String path = null; String authPath = null; String mailboxPath = null; URI webDavUri; try { webDavUri = new URI(uri); } catch (URISyntaxException use) { throw new IllegalArgumentException("Invalid WebDavStore URI", use); } String scheme = webDavUri.getScheme(); /* * Currently available schemes are: * webdav * webdav+ssl+ * * The following are obsolete schemes that may be found in pre-existing * settings from earlier versions or that may be found when imported. 
We * continue to recognize them and re-map them appropriately: * webdav+tls * webdav+tls+ * webdav+ssl */ if (scheme.equals("webdav")) { connectionSecurity = ConnectionSecurity.NONE; } else if (scheme.startsWith("webdav+")) { connectionSecurity = ConnectionSecurity.SSL_TLS_REQUIRED; } else { throw new IllegalArgumentException("Unsupported protocol (" + scheme + ")"); } host = webDavUri.getHost(); if (host.startsWith("http")) { String[] hostParts = host.split("://", 2); if (hostParts.length > 1) { host = hostParts[1]; } } port = webDavUri.getPort(); String userInfo = webDavUri.getUserInfo(); if (userInfo != null) { String[] userInfoParts = userInfo.split(":"); username = decodeUtf8(userInfoParts[0]); String userParts[] = username.split("\\\\", 2); if (userParts.length > 1) { alias = userParts[1]; } else { alias = username; } if (userInfoParts.length > 1) { password = decodeUtf8(userInfoParts[1]); } } String[] pathParts = webDavUri.getPath().split("\\|"); for (int i = 0, count = pathParts.length; i < count; i++) { if (i == 0) { if (pathParts[0] != null && pathParts[0].length() > 1) { path = pathParts[0]; } } else if (i == 1) { if (pathParts[1] != null && pathParts[1].length() > 1) { authPath = pathParts[1]; } } else if (i == 2) { if (pathParts[2] != null && pathParts[2].length() > 1) { mailboxPath = pathParts[2]; } } } return new WebDavStoreSettings(host, port, connectionSecurity, null, username, password, null, alias, path, authPath, mailboxPath); } /** * Creates a WebDavStore URI with the supplied settings. * * @param server The {@link ServerSettings} object that holds the server settings. * @return A WebDavStore URI that holds the same information as the {@code server} parameter. * @see StoreConfig#getStoreUri() * @see WebDavStore#decodeUri(String) */ public static String createUri(ServerSettings server) { String userEnc = encodeUtf8(server.username); String passwordEnc = (server.password != null) ? encodeUtf8(server.password) : ""; String scheme; switch (server.connectionSecurity) { case SSL_TLS_REQUIRED: scheme = "webdav+ssl+"; break; default: case NONE: scheme = "webdav"; break; } String userInfo = userEnc + ":" + passwordEnc; String uriPath; Map<String, String> extra = server.getExtra(); if (extra != null) { String path = extra.get(WebDavStoreSettings.PATH_KEY); path = (path != null) ? path : ""; String authPath = extra.get(WebDavStoreSettings.AUTH_PATH_KEY); authPath = (authPath != null) ? authPath : ""; String mailboxPath = extra.get(WebDavStoreSettings.MAILBOX_PATH_KEY); mailboxPath = (mailboxPath != null) ? 
mailboxPath : ""; uriPath = "/" + path + "|" + authPath + "|" + mailboxPath; } else { uriPath = "/||"; } try { return new URI(scheme, userInfo, server.host, server.port, uriPath, null, null).toString(); } catch (URISyntaxException e) { throw new IllegalArgumentException("Can't create WebDavStore URI", e); } } private ConnectionSecurity mConnectionSecurity; private String mUsername; /* Stores the username for authentications */ private String mAlias; /* Stores the alias for the user's mailbox */ private String mPassword; /* Stores the password for authentications */ private String mUrl; /* Stores the base URL for the server */ private String mHost; /* Stores the host name for the server */ private int mPort; private String mPath; /* Stores the path for the server */ private String mAuthPath; /* Stores the path off of the server to post data to for form based authentication */ private String mMailboxPath; /* Stores the user specified path to the mailbox */ private final WebDavHttpClient.WebDavHttpClientFactory mHttpClientFactory; private WebDavHttpClient mHttpClient = null; private HttpContext mContext = null; private String mAuthString; private CookieStore mAuthCookies = null; private short mAuthentication = WebDavConstants.AUTH_TYPE_NONE; private String mCachedLoginUrl; private Folder mSendFolder = null; private Map<String, WebDavFolder> mFolderList = new HashMap<String, WebDavFolder>(); public WebDavStore(StoreConfig storeConfig, WebDavHttpClient.WebDavHttpClientFactory clientFactory) throws MessagingException { super(storeConfig, null); mHttpClientFactory = clientFactory; WebDavStoreSettings settings; try { settings = WebDavStore.decodeUri(storeConfig.getStoreUri()); } catch (IllegalArgumentException e) { throw new MessagingException("Error while decoding store URI", e); } mHost = settings.host; mPort = settings.port; mConnectionSecurity = settings.connectionSecurity; mUsername = settings.username; mPassword = settings.password; mAlias = settings.alias; mPath = settings.path; mAuthPath = settings.authPath; mMailboxPath = settings.mailboxPath; if (mPath == null || mPath.equals("")) { mPath = "/Exchange"; } else if (!mPath.startsWith("/")) { mPath = "/" + mPath; } if (mMailboxPath == null || mMailboxPath.equals("")) { mMailboxPath = "/" + mAlias; } else if (!mMailboxPath.startsWith("/")) { mMailboxPath = "/" + mMailboxPath; } if (mAuthPath != null && !mAuthPath.equals("") && !mAuthPath.startsWith("/")) { mAuthPath = "/" + mAuthPath; } // The URL typically looks like the following: "https://mail.domain.com/Exchange/alias". // The inbox path would look like: "https://mail.domain.com/Exchange/alias/Inbox". mUrl = getRoot() + mPath + mMailboxPath; mAuthString = "Basic " + Base64.encode(mUsername + ":" + mPassword); } private String getRoot() { String root; if (mConnectionSecurity == ConnectionSecurity.SSL_TLS_REQUIRED) { root = "https"; } else { root = "http"; } root += "://" + mHost + ":" + mPort; return root; } HttpContext getContext() { return mContext; } short getAuthentication() { return mAuthentication; } StoreConfig getStoreConfig() { return mStoreConfig; } @Override public void checkSettings() throws MessagingException { authenticate(); } @Override public List<? 
extends Folder> getPersonalNamespaces(boolean forceListAll) throws MessagingException { List<Folder> folderList = new LinkedList<Folder>(); /** * We have to check authentication here so we have the proper URL stored */ getHttpClient(); /** * Firstly we get the "special" folders list (inbox, outbox, etc) * and setup the account accordingly */ Map<String, String> headers = new HashMap<String, String>(); headers.put("Depth", "0"); headers.put("Brief", "t"); DataSet dataset = processRequest(this.mUrl, "PROPFIND", getSpecialFoldersList(), headers); Map<String, String> specialFoldersMap = dataset.getSpecialFolderToUrl(); String folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_INBOX_FOLDER)); if (folderName != null) { mStoreConfig.setAutoExpandFolderName(folderName); mStoreConfig.setInboxFolderName(folderName); } folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_DRAFTS_FOLDER)); if (folderName != null) mStoreConfig.setDraftsFolderName(folderName); folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_TRASH_FOLDER)); if (folderName != null) mStoreConfig.setTrashFolderName(folderName); folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_SPAM_FOLDER)); if (folderName != null) mStoreConfig.setSpamFolderName(folderName); // K-9 Mail's outbox is a special local folder and different from Exchange/WebDAV's outbox. /* folderName = getFolderName(specialFoldersMap.get(DAV_MAIL_OUTBOX_FOLDER)); if (folderName != null) mAccount.setOutboxFolderName(folderName); */ folderName = getFolderName(specialFoldersMap.get(WebDavConstants.DAV_MAIL_SENT_FOLDER)); if (folderName != null) mStoreConfig.setSentFolderName(folderName); /** * Next we get all the folders (including "special" ones) */ headers = new HashMap<String, String>(); headers.put("Brief", "t"); dataset = processRequest(this.mUrl, "SEARCH", getFolderListXml(), headers); String[] folderUrls = dataset.getHrefs(); for (String tempUrl : folderUrls) { WebDavFolder folder = createFolder(tempUrl); if (folder != null) folderList.add(folder); } return folderList; } /** * Creates a folder using the URL passed as parameter (only if it has not been * already created) and adds this to our store folder map. * * @param folderUrl * @return */ private WebDavFolder createFolder(String folderUrl) { if (folderUrl == null) return null; WebDavFolder wdFolder = null; String folderName = getFolderName(folderUrl); if (folderName != null) { wdFolder = getFolder(folderName); if (wdFolder != null) { wdFolder.setUrl(folderUrl); } } // else: Unknown URL format => NO Folder created return wdFolder; } private String getFolderName(String folderUrl) { if (folderUrl == null) return null; // Here we extract the folder name starting from the complete url. // folderUrl is in the form http://mail.domain.com/exchange/username/foldername // so we need "foldername" which is the string after the fifth slash int folderSlash = -1; for (int j = 0; j < 5; j++) { folderSlash = folderUrl.indexOf('/', folderSlash + 1); if (folderSlash < 0) break; } if (folderSlash > 0) { String fullPathName; // Removes the final slash if present if (folderUrl.charAt(folderUrl.length() - 1) == '/') fullPathName = folderUrl.substring(folderSlash + 1, folderUrl.length() - 1); else fullPathName = folderUrl.substring(folderSlash + 1); // Decodes the url-encoded folder name (i.e. 
"My%20folder" => "My Folder" return decodeUtf8(fullPathName); } return null; } @Override public WebDavFolder getFolder(String name) { WebDavFolder folder; if ((folder = this.mFolderList.get(name)) == null) { folder = new WebDavFolder(this, name); mFolderList.put(name, folder); } return folder; } public Folder getSendSpoolFolder() throws MessagingException { if (mSendFolder == null) mSendFolder = getFolder(WebDavConstants.DAV_MAIL_SEND_FOLDER); return mSendFolder; } @Override public boolean isMoveCapable() { return true; } @Override public boolean isCopyCapable() { return true; } private String getSpecialFoldersList() { StringBuilder builder = new StringBuilder(200); builder.append("<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"no\"?>"); builder.append("<propfind xmlns=\"DAV:\">"); builder.append("<prop>"); builder.append("<").append(WebDavConstants.DAV_MAIL_INBOX_FOLDER).append(" xmlns=\"urn:schemas:httpmail:\"/>"); builder.append("<").append(WebDavConstants.DAV_MAIL_DRAFTS_FOLDER).append(" xmlns=\"urn:schemas:httpmail:\"/>"); builder.append("<").append(WebDavConstants.DAV_MAIL_OUTBOX_FOLDER).append(" xmlns=\"urn:schemas:httpmail:\"/>"); builder.append("<").append(WebDavConstants.DAV_MAIL_SENT_FOLDER).append(" xmlns=\"urn:schemas:httpmail:\"/>"); builder.append("<").append(WebDavConstants.DAV_MAIL_TRASH_FOLDER).append(" xmlns=\"urn:schemas:httpmail:\"/>"); // This should always be ##DavMailSubmissionURI## for which we already have a constant // buffer.append("<sendmsg xmlns=\"urn:schemas:httpmail:\"/>"); builder.append("<").append(WebDavConstants.DAV_MAIL_SPAM_FOLDER).append(" xmlns=\"urn:schemas:httpmail:\"/>"); builder.append("</prop>"); builder.append("</propfind>"); return builder.toString(); } /*************************************************************** * WebDAV XML Request body retrieval functions */ private String getFolderListXml() { StringBuilder builder = new StringBuilder(200); builder.append("<?xml version='1.0' ?>"); builder.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); builder.append("SELECT \"DAV:uid\", \"DAV:ishidden\"\r\n"); builder.append(" FROM SCOPE('deep traversal of \"").append(this.mUrl).append("\"')\r\n"); builder.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=True\r\n"); builder.append("</a:sql></a:searchrequest>\r\n"); return builder.toString(); } String getMessageCountXml(String messageState) { StringBuilder builder = new StringBuilder(200); builder.append("<?xml version='1.0' ?>"); builder.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); builder.append("SELECT \"DAV:visiblecount\"\r\n"); builder.append(" FROM \"\"\r\n"); builder.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False AND \"urn:schemas:httpmail:read\"=") .append(messageState).append("\r\n"); builder.append(" GROUP BY \"DAV:ishidden\"\r\n"); builder.append("</a:sql></a:searchrequest>\r\n"); return builder.toString(); } String getMessageEnvelopeXml(String[] uids) { StringBuilder buffer = new StringBuilder(200); buffer.append("<?xml version='1.0' ?>"); buffer.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); buffer.append("SELECT \"DAV:uid\", \"DAV:getcontentlength\","); buffer.append(" \"urn:schemas:mailheader:mime-version\","); buffer.append(" \"urn:schemas:mailheader:content-type\","); buffer.append(" \"urn:schemas:mailheader:subject\","); buffer.append(" \"urn:schemas:mailheader:date\","); buffer.append(" \"urn:schemas:mailheader:thread-topic\","); buffer.append(" \"urn:schemas:mailheader:thread-index\","); buffer.append(" 
\"urn:schemas:mailheader:from\","); buffer.append(" \"urn:schemas:mailheader:to\","); buffer.append(" \"urn:schemas:mailheader:in-reply-to\","); buffer.append(" \"urn:schemas:mailheader:cc\","); buffer.append(" \"urn:schemas:httpmail:read\""); buffer.append(" \r\n"); buffer.append(" FROM \"\"\r\n"); buffer.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False AND "); for (int i = 0, count = uids.length; i < count; i++) { if (i != 0) { buffer.append(" OR "); } buffer.append(" \"DAV:uid\"='").append(uids[i]).append("' "); } buffer.append("\r\n"); buffer.append("</a:sql></a:searchrequest>\r\n"); return buffer.toString(); } String getMessagesXml() { StringBuilder builder = new StringBuilder(200); builder.append("<?xml version='1.0' ?>"); builder.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); builder.append("SELECT \"DAV:uid\"\r\n"); builder.append(" FROM \"\"\r\n"); builder.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False\r\n"); builder.append("</a:sql></a:searchrequest>\r\n"); return builder.toString(); } String getMessageUrlsXml(String[] uids) { StringBuilder buffer = new StringBuilder(600); buffer.append("<?xml version='1.0' ?>"); buffer.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); buffer.append("SELECT \"urn:schemas:httpmail:read\", \"DAV:uid\"\r\n"); buffer.append(" FROM \"\"\r\n"); buffer.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False AND "); for (int i = 0, count = uids.length; i < count; i++) { if (i != 0) { buffer.append(" OR "); } buffer.append(" \"DAV:uid\"='").append(uids[i]).append("' "); } buffer.append("\r\n"); buffer.append("</a:sql></a:searchrequest>\r\n"); return buffer.toString(); } String getMessageFlagsXml(String[] uids) throws MessagingException { if (uids.length == 0) { throw new MessagingException("Attempt to get flags on 0 length array for uids"); } StringBuilder buffer = new StringBuilder(200); buffer.append("<?xml version='1.0' ?>"); buffer.append("<a:searchrequest xmlns:a='DAV:'><a:sql>\r\n"); buffer.append("SELECT \"urn:schemas:httpmail:read\", \"DAV:uid\"\r\n"); buffer.append(" FROM \"\"\r\n"); buffer.append(" WHERE \"DAV:ishidden\"=False AND \"DAV:isfolder\"=False AND "); for (int i = 0, count = uids.length; i < count; i++) { if (i != 0) { buffer.append(" OR "); } buffer.append(" \"DAV:uid\"='").append(uids[i]).append("' "); } buffer.append("\r\n"); buffer.append("</a:sql></a:searchrequest>\r\n"); return buffer.toString(); } String getMarkMessagesReadXml(String[] urls, boolean read) { StringBuilder buffer = new StringBuilder(600); buffer.append("<?xml version='1.0' ?>\r\n"); buffer.append("<a:propertyupdate xmlns:a='DAV:' xmlns:b='urn:schemas:httpmail:'>\r\n"); buffer.append("<a:target>\r\n"); for (String url : urls) { buffer.append(" <a:href>").append(url).append("</a:href>\r\n"); } buffer.append("</a:target>\r\n"); buffer.append("<a:set>\r\n"); buffer.append(" <a:prop>\r\n"); buffer.append(" <b:read>").append(read ? "1" : "0").append("</b:read>\r\n"); buffer.append(" </a:prop>\r\n"); buffer.append("</a:set>\r\n"); buffer.append("</a:propertyupdate>\r\n"); return buffer.toString(); } // For flag: // http://www.devnewsgroups.net/group/microsoft.public.exchange.development/topic27175.aspx // "<m:0x10900003>1</m:0x10900003>" & _ String getMoveOrCopyMessagesReadXml(String[] urls, boolean isMove) { String action = (isMove ? 
"move" : "copy"); StringBuilder buffer = new StringBuilder(600); buffer.append("<?xml version='1.0' ?>\r\n"); buffer.append("<a:").append(action).append(" xmlns:a='DAV:' xmlns:b='urn:schemas:httpmail:'>\r\n"); buffer.append("<a:target>\r\n"); for (String url : urls) { buffer.append(" <a:href>").append(url).append("</a:href>\r\n"); } buffer.append("</a:target>\r\n"); buffer.append("</a:").append(action).append(">\r\n"); return buffer.toString(); } /*************************************************************** * Authentication related methods */ /** * Determines which type of authentication Exchange is using and authenticates appropriately. * * @throws MessagingException */ public boolean authenticate() throws MessagingException { try { if (mAuthentication == WebDavConstants.AUTH_TYPE_NONE) { ConnectionInfo info = doInitialConnection(); if (info.requiredAuthType == WebDavConstants.AUTH_TYPE_BASIC) { HttpGeneric request = new HttpGeneric(mUrl); request.setMethod("GET"); request.setHeader("Authorization", mAuthString); WebDavHttpClient httpClient = getHttpClient(); HttpResponse response = httpClient.executeOverride(request, mContext); int statusCode = response.getStatusLine().getStatusCode(); if (statusCode >= 200 && statusCode < 300) { mAuthentication = WebDavConstants.AUTH_TYPE_BASIC; } else if (statusCode == 401) { throw new MessagingException("Invalid username or password for authentication."); } else { throw new MessagingException("Error with code " + response.getStatusLine().getStatusCode() + " during request processing: " + response.getStatusLine().toString()); } } else if (info.requiredAuthType == WebDavConstants.AUTH_TYPE_FORM_BASED) { doFBA(info); } } else if (mAuthentication == WebDavConstants.AUTH_TYPE_BASIC) { // Nothing to do, we authenticate with every request when // using basic authentication. } else if (mAuthentication == WebDavConstants.AUTH_TYPE_FORM_BASED) { // Our cookie expired, re-authenticate. doFBA(null); } } catch (IOException ioe) { Log.e(LOG_TAG, "Error during authentication: " + ioe + "\nStack: " + WebDavUtils.processException(ioe)); throw new MessagingException("Error during authentication", ioe); } return mAuthentication != WebDavConstants.AUTH_TYPE_NONE; } /** * Makes the initial connection to Exchange for authentication. Determines the type of authentication necessary for * the server. * * @throws MessagingException */ private ConnectionInfo doInitialConnection() throws MessagingException { // For our initial connection we are sending an empty GET request to // the configured URL, which should be in the following form: // https://mail.server.com/Exchange/alias // // Possible status codes include: // 401 - the server uses basic authentication // 30x - the server is trying to redirect us to an OWA login // 20x - success // // The latter two indicate form-based authentication. ConnectionInfo info = new ConnectionInfo(); WebDavHttpClient httpClient = getHttpClient(); HttpGeneric request = new HttpGeneric(mUrl); request.setMethod("GET"); try { HttpResponse response = httpClient.executeOverride(request, mContext); info.statusCode = response.getStatusLine().getStatusCode(); if (info.statusCode == 401) { // 401 is the "Unauthorized" status code, meaning the server wants // an authentication header for basic authentication. 
info.requiredAuthType = WebDavConstants.AUTH_TYPE_BASIC; } else if ((info.statusCode >= 200 && info.statusCode < 300) || // Success (info.statusCode >= 300 && info.statusCode < 400) || // Redirect (info.statusCode == 440)) { // Unauthorized // We will handle all 3 situations the same. First we take an educated // guess at where the authorization DLL is located. If this is this // doesn't work, then we'll use the redirection URL for OWA login given // to us by exchange. We can use this to scrape the location of the // authorization URL. info.requiredAuthType = WebDavConstants.AUTH_TYPE_FORM_BASED; if (mAuthPath != null && !mAuthPath.equals("")) { // The user specified their own authentication path, use that. info.guessedAuthUrl = getRoot() + mAuthPath; } else { // Use the default path to the authentication dll. info.guessedAuthUrl = getRoot() + "/exchweb/bin/auth/owaauth.dll"; } // Determine where the server is trying to redirect us. Header location = response.getFirstHeader("Location"); if (location != null) { info.redirectUrl = location.getValue(); } } else { throw new IOException("Error with code " + info.statusCode + " during request processing: " + response.getStatusLine().toString()); } } catch (SSLException e) { throw new CertificateValidationException(e.getMessage(), e); } catch (IOException ioe) { Log.e(LOG_TAG, "IOException: " + ioe + "\nTrace: " + WebDavUtils.processException(ioe)); throw new MessagingException("IOException", ioe); } return info; } /** * Performs form-based authentication. * * @throws MessagingException */ public void doFBA(ConnectionInfo info) throws IOException, MessagingException { // Clear out cookies from any previous authentication. if (mAuthCookies != null) mAuthCookies.clear(); WebDavHttpClient httpClient = getHttpClient(); String loginUrl; if (info != null) { loginUrl = info.guessedAuthUrl; } else if (mCachedLoginUrl != null && !mCachedLoginUrl.equals("")) { loginUrl = mCachedLoginUrl; } else { throw new MessagingException("No valid login URL available for form-based authentication."); } HttpGeneric request = new HttpGeneric(loginUrl); request.setMethod("POST"); // Build the POST data. List<BasicNameValuePair> pairs = new ArrayList<BasicNameValuePair>(); pairs.add(new BasicNameValuePair("destination", mUrl)); pairs.add(new BasicNameValuePair("username", mUsername)); pairs.add(new BasicNameValuePair("password", mPassword)); pairs.add(new BasicNameValuePair("flags", "0")); pairs.add(new BasicNameValuePair("SubmitCreds", "Log+On")); pairs.add(new BasicNameValuePair("forcedownlevel", "0")); pairs.add(new BasicNameValuePair("trusted", "0")); UrlEncodedFormEntity formEntity = new UrlEncodedFormEntity(pairs); request.setEntity(formEntity); HttpResponse response = httpClient.executeOverride(request, mContext); boolean authenticated = testAuthenticationResponse(response); if (!authenticated) { // Check the response from the authentication request above for a form action. String formAction = findFormAction(WebDavHttpClient.getUngzippedContent(response.getEntity())); if (formAction == null) { // If there is no form action, try using our redirect URL from the initial connection. 
if (info != null && info.redirectUrl != null && !info.redirectUrl.equals("")) { loginUrl = info.redirectUrl; request = new HttpGeneric(loginUrl); request.setMethod("GET"); response = httpClient.executeOverride(request, mContext); formAction = findFormAction(WebDavHttpClient.getUngzippedContent(response.getEntity())); } } if (formAction != null) { try { URI formActionUri = new URI(formAction); URI loginUri = new URI(loginUrl); if (formActionUri.isAbsolute()) { // The form action is an absolute URL, just use it. loginUrl = formAction; } else { // Append the form action to our current URL, minus the file name. String urlPath; if (formAction.startsWith("/")) { urlPath = formAction; } else { urlPath = loginUri.getPath(); int lastPathPos = urlPath.lastIndexOf('/'); if (lastPathPos > -1) { urlPath = urlPath.substring(0, lastPathPos + 1); urlPath = urlPath.concat(formAction); } } // Reconstruct the login URL based on the original login URL and the form action. URI finalUri = new URI(loginUri.getScheme(), loginUri.getUserInfo(), loginUri.getHost(), loginUri.getPort(), urlPath, null, null); loginUrl = finalUri.toString(); } // Retry the login using our new URL. request = new HttpGeneric(loginUrl); request.setMethod("POST"); request.setEntity(formEntity); response = httpClient.executeOverride(request, mContext); authenticated = testAuthenticationResponse(response); } catch (URISyntaxException e) { Log.e(LOG_TAG, "URISyntaxException caught " + e + "\nTrace: " + WebDavUtils.processException(e)); throw new MessagingException("URISyntaxException caught", e); } } else { throw new MessagingException("A valid URL for Exchange authentication could not be found."); } } if (authenticated) { mAuthentication = WebDavConstants.AUTH_TYPE_FORM_BASED; mCachedLoginUrl = loginUrl; } else { throw new MessagingException("Invalid credentials provided for authentication."); } } /** * Searches the specified stream for an HTML form and returns the form's action target. * * @throws IOException */ private String findFormAction(InputStream istream) throws IOException { String formAction = null; BufferedReader reader = new BufferedReader(new InputStreamReader(istream), 4096); String tempText; // Read line by line until we find something like: <form action="owaauth.dll"...>. while ((tempText = reader.readLine()) != null && formAction == null) { if (tempText.contains(" action=")) { String[] actionParts = tempText.split(" action="); if (actionParts.length > 1 && actionParts[1].length() > 1) { char openQuote = actionParts[1].charAt(0); int closePos = actionParts[1].indexOf(openQuote, 1); if (closePos > 1) { formAction = actionParts[1].substring(1, closePos); // Remove any GET parameters. int quesPos = formAction.indexOf('?'); if (quesPos != -1) { formAction = formAction.substring(0, quesPos); } } } } } return formAction; } private boolean testAuthenticationResponse(HttpResponse response) throws MessagingException { boolean authenticated = false; int statusCode = response.getStatusLine().getStatusCode(); // Exchange 2007 will return a 302 status code no matter what. if (((statusCode >= 200 && statusCode < 300) || statusCode == 302) && mAuthCookies != null && !mAuthCookies.getCookies().isEmpty()) { // We may be authenticated, we need to send a test request to know for sure. // Exchange 2007 adds the same cookies whether the username and password were valid or not. 
ConnectionInfo info = doInitialConnection(); if (info.statusCode >= 200 && info.statusCode < 300) { authenticated = true; } else if (info.statusCode == 302) { // If we are successfully authenticated, Exchange will try to redirect us to our OWA inbox. // Otherwise, it will redirect us to a logon page. // Our URL is in the form: https://hostname:port/Exchange/alias. // The redirect is in the form: https://hostname:port/owa/alias. // Do a simple replace and compare the resulting strings. try { String thisPath = new URI(mUrl).getPath(); String redirectPath = new URI(info.redirectUrl).getPath(); if (!thisPath.endsWith("/")) { thisPath = thisPath.concat("/"); } if (!redirectPath.endsWith("/")) { redirectPath = redirectPath.concat("/"); } if (redirectPath.equalsIgnoreCase(thisPath)) { authenticated = true; } else { int found = thisPath.indexOf('/', 1); if (found != -1) { String replace = thisPath.substring(0, found + 1); redirectPath = redirectPath.replace("/owa/", replace); if (redirectPath.equalsIgnoreCase(thisPath)) { authenticated = true; } } } } catch (URISyntaxException e) { Log.e(LOG_TAG, "URISyntaxException caught " + e + "\nTrace: " + WebDavUtils.processException(e)); throw new MessagingException("URISyntaxException caught", e); } } } return authenticated; } public CookieStore getAuthCookies() { return mAuthCookies; } public String getAlias() { return mAlias; } public String getUrl() { return mUrl; } public WebDavHttpClient getHttpClient() throws MessagingException { if (mHttpClient == null) { mHttpClient = mHttpClientFactory.create(); // Disable automatic redirects on the http client. mHttpClient.getParams().setBooleanParameter("http.protocol.handle-redirects", false); // Setup a cookie store for forms-based authentication. mContext = new BasicHttpContext(); mAuthCookies = new BasicCookieStore(); mContext.setAttribute(ClientContext.COOKIE_STORE, mAuthCookies); SchemeRegistry reg = mHttpClient.getConnectionManager().getSchemeRegistry(); try { Scheme s = new Scheme("https", new WebDavSocketFactory(mHost, 443), 443); reg.register(s); } catch (NoSuchAlgorithmException nsa) { Log.e(LOG_TAG, "NoSuchAlgorithmException in getHttpClient: " + nsa); throw new MessagingException("NoSuchAlgorithmException in getHttpClient: " + nsa); } catch (KeyManagementException kme) { Log.e(LOG_TAG, "KeyManagementException in getHttpClient: " + kme); throw new MessagingException("KeyManagementException in getHttpClient: " + kme); } } return mHttpClient; } private InputStream sendRequest(String url, String method, StringEntity messageBody, Map<String, String> headers, boolean tryAuth) throws MessagingException { if (url == null || method == null) { return null; } WebDavHttpClient httpClient = getHttpClient(); try { int statusCode; HttpGeneric httpMethod = new HttpGeneric(url); HttpResponse response; HttpEntity entity; if (messageBody != null) { httpMethod.setEntity(messageBody); } if (headers != null) { for (Map.Entry<String, String> entry : headers.entrySet()) { httpMethod.setHeader(entry.getKey(), entry.getValue()); } } if (mAuthentication == WebDavConstants.AUTH_TYPE_NONE) { if (!tryAuth || !authenticate()) { throw new MessagingException("Unable to authenticate in sendRequest()."); } } else if (mAuthentication == WebDavConstants.AUTH_TYPE_BASIC) { httpMethod.setHeader("Authorization", mAuthString); } httpMethod.setMethod(method); response = httpClient.executeOverride(httpMethod, mContext); statusCode = response.getStatusLine().getStatusCode(); entity = response.getEntity(); if (statusCode == 401) { throw new 
MessagingException("Invalid username or password for Basic authentication."); } else if (statusCode == 440) { if (tryAuth && mAuthentication == WebDavConstants.AUTH_TYPE_FORM_BASED) { // Our cookie expired, re-authenticate. doFBA(null); sendRequest(url, method, messageBody, headers, false); } else { throw new MessagingException("Authentication failure in sendRequest()."); } } else if (statusCode < 200 || statusCode >= 300) { throw new IOException("Error with code " + statusCode + " during request processing: " + response.getStatusLine().toString()); } if (entity != null) { return WebDavHttpClient.getUngzippedContent(entity); } } catch (UnsupportedEncodingException uee) { Log.e(LOG_TAG, "UnsupportedEncodingException: " + uee + "\nTrace: " + WebDavUtils.processException(uee)); throw new MessagingException("UnsupportedEncodingException", uee); } catch (IOException ioe) { Log.e(LOG_TAG, "IOException: " + ioe + "\nTrace: " + WebDavUtils.processException(ioe)); throw new MessagingException("IOException", ioe); } return null; } public String getAuthString() { return mAuthString; } /** * Performs an httprequest to the supplied url using the supplied method. messageBody and headers are optional as * not all requests will need them. There are two signatures to support calls that don't require parsing of the * response. */ DataSet processRequest(String url, String method, String messageBody, Map<String, String> headers) throws MessagingException { return processRequest(url, method, messageBody, headers, true); } DataSet processRequest(String url, String method, String messageBody, Map<String, String> headers, boolean needsParsing) throws MessagingException { DataSet dataset = new DataSet(); if (K9MailLib.isDebug() && DEBUG_PROTOCOL_WEBDAV) { Log.v(LOG_TAG, "processRequest url = '" + url + "', method = '" + method + "', messageBody = '" + messageBody + "'"); } if (url == null || method == null) { return dataset; } getHttpClient(); try { StringEntity messageEntity = null; if (messageBody != null) { messageEntity = new StringEntity(messageBody); messageEntity.setContentType("text/xml"); } InputStream istream = sendRequest(url, method, messageEntity, headers, true); if (istream != null && needsParsing) { try { SAXParserFactory spf = SAXParserFactory.newInstance(); spf.setNamespaceAware(true); //This should be a no-op on Android, but makes the tests work SAXParser sp = spf.newSAXParser(); XMLReader xr = sp.getXMLReader(); WebDavHandler myHandler = new WebDavHandler(); xr.setContentHandler(myHandler); xr.parse(new InputSource(istream)); dataset = myHandler.getDataSet(); } catch (SAXException se) { Log.e(LOG_TAG, "SAXException in processRequest() " + se + "\nTrace: " + WebDavUtils.processException(se)); throw new MessagingException("SAXException in processRequest() ", se); } catch (ParserConfigurationException pce) { Log.e(LOG_TAG, "ParserConfigurationException in processRequest() " + pce + "\nTrace: " + WebDavUtils.processException(pce)); throw new MessagingException("ParserConfigurationException in processRequest() ", pce); } istream.close(); } } catch (UnsupportedEncodingException uee) { Log.e(LOG_TAG, "UnsupportedEncodingException: " + uee + "\nTrace: " + WebDavUtils.processException(uee)); throw new MessagingException("UnsupportedEncodingException in processRequest() ", uee); } catch (IOException ioe) { Log.e(LOG_TAG, "IOException: " + ioe + "\nTrace: " + WebDavUtils.processException(ioe)); throw new MessagingException("IOException in processRequest() ", ioe); } return dataset; } @Override public 
boolean isSendCapable() { return true; } @Override public void sendMessages(List<? extends Message> messages) throws MessagingException { WebDavFolder tmpFolder = (WebDavFolder) getFolder(mStoreConfig.getDraftsFolderName()); try { tmpFolder.open(Folder.OPEN_MODE_RW); List<? extends Message> retMessages = tmpFolder.appendWebDavMessages(messages); tmpFolder.moveMessages(retMessages, getSendSpoolFolder()); } finally { if (tmpFolder != null) { tmpFolder.close(); } } } }
WebDAV: Improve logging when we get an unexpected redirect during sync
k9mail-library/src/main/java/com/fsck/k9/mail/store/webdav/WebDavStore.java
WebDAV: Improve logging when we get an unexpected redirect during sync
<ide><path>9mail-library/src/main/java/com/fsck/k9/mail/store/webdav/WebDavStore.java <ide> import com.fsck.k9.mail.store.RemoteStore; <ide> import com.fsck.k9.mail.store.StoreConfig; <ide> <del>import org.apache.commons.io.IOUtils; <ide> import org.apache.http.*; <ide> import org.apache.http.client.CookieStore; <ide> import org.apache.http.client.entity.UrlEncodedFormEntity; <ide> import java.net.URISyntaxException; <ide> import java.security.KeyManagementException; <ide> import java.security.NoSuchAlgorithmException; <del>import java.text.DateFormat; <del>import java.text.SimpleDateFormat; <ide> import java.util.*; <del>import java.util.zip.GZIPInputStream; <ide> <ide> import static com.fsck.k9.mail.K9MailLib.DEBUG_PROTOCOL_WEBDAV; <ide> import static com.fsck.k9.mail.K9MailLib.LOG_TAG; <ide> } else { <ide> throw new MessagingException("Authentication failure in sendRequest()."); <ide> } <add> } else if (statusCode == 302) { <add> handleUnexpectedRedirect(response, url); <ide> } else if (statusCode < 200 || statusCode >= 300) { <ide> throw new IOException("Error with code " + statusCode + " during request processing: " + <ide> response.getStatusLine().toString()); <ide> } <ide> <ide> return null; <add> } <add> <add> private void handleUnexpectedRedirect(HttpResponse response, String url) throws IOException { <add> if (response.getFirstHeader("Location") != null) { <add> // TODO: This may indicate lack of authentication or may alternatively be something we should follow <add> throw new IOException("Unexpected redirect during request processing. " + <add> "Expected response from: "+url+" but told to redirect to:" + <add> response.getFirstHeader("Location").getValue()); <add> } else { <add> throw new IOException("Unexpected redirect during request processing. " + <add> "Expected response from: " + url + " but not told where to redirect to"); <add> } <ide> } <ide> <ide> public String getAuthString() {
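A minimal standalone sketch of the redirect check this commit's diff introduces (the handleUnexpectedRedirect helper): it turns an unexplained redirect into a descriptive IOException that names the redirect target when one is given. It is written without K-9 Mail's HttpResponse plumbing; the class name, method name, and sample URLs below are illustrative assumptions, not part of the library.

import java.io.IOException;

public class RedirectCheckSketch {

    // Fails loudly when a request that should have returned a normal response was
    // redirected instead, including the redirect target in the message when known.
    static void failOnUnexpectedRedirect(String requestUrl, String locationHeader) throws IOException {
        if (locationHeader != null) {
            throw new IOException("Unexpected redirect during request processing. "
                    + "Expected response from: " + requestUrl
                    + " but told to redirect to: " + locationHeader);
        }
        throw new IOException("Unexpected redirect during request processing. "
                + "Expected response from: " + requestUrl
                + " but not told where to redirect to");
    }

    public static void main(String[] args) {
        try {
            failOnUnexpectedRedirect("https://mail.example.com/Exchange/alias",
                    "https://mail.example.com/owa/auth/logon.aspx");
        } catch (IOException expected) {
            System.out.println(expected.getMessage());
        }
    }
}

In the actual change, the redirect target comes from the response's Location header (response.getFirstHeader("Location")) before the message is built; the sketch passes it in as a plain nullable string instead.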
Java
apache-2.0
a50a730db501ae0b6687ecdaf840453dcd6e2f87
0
juanavelez/hazelcast,Donnerbart/hazelcast,tombujok/hazelcast,emre-aydin/hazelcast,mesutcelik/hazelcast,tkountis/hazelcast,lmjacksoniii/hazelcast,tufangorel/hazelcast,emre-aydin/hazelcast,juanavelez/hazelcast,tkountis/hazelcast,tombujok/hazelcast,emre-aydin/hazelcast,lmjacksoniii/hazelcast,tufangorel/hazelcast,mesutcelik/hazelcast,tkountis/hazelcast,dsukhoroslov/hazelcast,emrahkocaman/hazelcast,emrahkocaman/hazelcast,tufangorel/hazelcast,dbrimley/hazelcast,mesutcelik/hazelcast,dbrimley/hazelcast,Donnerbart/hazelcast,mdogan/hazelcast,dbrimley/hazelcast,Donnerbart/hazelcast,dsukhoroslov/hazelcast,mdogan/hazelcast,mdogan/hazelcast
/* * Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.nio; import com.hazelcast.core.HazelcastException; import com.hazelcast.internal.serialization.InternalSerializationService; import com.hazelcast.logging.Logger; import com.hazelcast.nio.serialization.Data; import com.hazelcast.spi.annotation.PrivateApi; import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectStreamClass; import java.io.OutputStream; import java.lang.reflect.Modifier; import java.lang.reflect.Proxy; import java.nio.ByteBuffer; import java.util.zip.DataFormatException; import java.util.zip.Deflater; import java.util.zip.Inflater; @PrivateApi public final class IOUtil { public static final byte PRIMITIVE_TYPE_BOOLEAN = 1; public static final byte PRIMITIVE_TYPE_BYTE = 2; public static final byte PRIMITIVE_TYPE_SHORT = 3; public static final byte PRIMITIVE_TYPE_INTEGER = 4; public static final byte PRIMITIVE_TYPE_LONG = 5; public static final byte PRIMITIVE_TYPE_FLOAT = 6; public static final byte PRIMITIVE_TYPE_DOUBLE = 7; public static final byte PRIMITIVE_TYPE_UTF = 8; private IOUtil() { } public static ByteBuffer newByteBuffer(int bufferSize, boolean direct) { if (direct) { return ByteBuffer.allocateDirect(bufferSize); } else { return ByteBuffer.allocate(bufferSize); } } /** * This method has a direct dependency on how objects are serialized in * {@link com.hazelcast.internal.serialization.impl.DataSerializableSerializer}. If the stream * format is changed, this extraction method must be changed as well. */ public static long extractOperationCallId(Data data, InternalSerializationService serializationService) throws IOException { ObjectDataInput input = serializationService.createObjectDataInput(data); boolean identified = input.readBoolean(); if (identified) { // read factoryId input.readInt(); // read typeId input.readInt(); } else { // read classname input.readUTF(); } // read callId return input.readLong(); } public static void writeByteArray(ObjectDataOutput out, byte[] value) throws IOException { int size = (value == null) ? 
0 : value.length; out.writeInt(size); if (size > 0) { out.write(value); } } public static byte[] readByteArray(ObjectDataInput in) throws IOException { int size = in.readInt(); if (size == 0) { return null; } else { byte[] b = new byte[size]; in.readFully(b); return b; } } public static void writeObject(ObjectDataOutput out, Object object) throws IOException { boolean isBinary = object instanceof Data; out.writeBoolean(isBinary); if (isBinary) { out.writeData((Data) object); } else { out.writeObject(object); } } @SuppressWarnings("unchecked") public static <T> T readObject(ObjectDataInput in) throws IOException { boolean isBinary = in.readBoolean(); if (isBinary) { return (T) in.readData(); } return in.readObject(); } public static ObjectInputStream newObjectInputStream(final ClassLoader classLoader, InputStream in) throws IOException { return new ClassLoaderAwareObjectInputStream(classLoader, in); } public static OutputStream newOutputStream(final ByteBuffer dst) { return new OutputStream() { public void write(int b) throws IOException { dst.put((byte) b); } public void write(byte[] bytes, int off, int len) throws IOException { dst.put(bytes, off, len); } }; } public static InputStream newInputStream(final ByteBuffer src) { return new InputStream() { public int read() throws IOException { if (!src.hasRemaining()) { return -1; } return src.get() & 0xff; } public int read(byte[] bytes, int off, int len) throws IOException { if (!src.hasRemaining()) { return -1; } len = Math.min(len, src.remaining()); src.get(bytes, off, len); return len; } }; } public static int copyToHeapBuffer(ByteBuffer src, ByteBuffer dst) { if (src == null) { return 0; } int n = Math.min(src.remaining(), dst.remaining()); if (n > 0) { if (n < 16) { for (int i = 0; i < n; i++) { dst.put(src.get()); } } else { int srcPosition = src.position(); int destPosition = dst.position(); System.arraycopy(src.array(), srcPosition, dst.array(), destPosition, n); src.position(srcPosition + n); dst.position(destPosition + n); } } return n; } public static byte[] compress(byte[] input) throws IOException { if (input.length == 0) { return new byte[0]; } Deflater compressor = new Deflater(); compressor.setLevel(Deflater.BEST_SPEED); compressor.setInput(input); compressor.finish(); ByteArrayOutputStream bos = new ByteArrayOutputStream(input.length / 10); byte[] buf = new byte[input.length / 10]; while (!compressor.finished()) { int count = compressor.deflate(buf); bos.write(buf, 0, count); } bos.close(); compressor.end(); return bos.toByteArray(); } public static byte[] decompress(byte[] compressedData) throws IOException { if (compressedData.length == 0) { return compressedData; } Inflater inflater = new Inflater(); inflater.setInput(compressedData); ByteArrayOutputStream bos = new ByteArrayOutputStream(compressedData.length); byte[] buf = new byte[1024]; while (!inflater.finished()) { try { int count = inflater.inflate(buf); bos.write(buf, 0, count); } catch (DataFormatException e) { Logger.getLogger(IOUtil.class).finest("Decompression failed", e); } } bos.close(); inflater.end(); return bos.toByteArray(); } public static void writeAttributeValue(Object value, ObjectDataOutput out) throws IOException { Class<?> type = value.getClass(); if (type.equals(Boolean.class)) { out.writeByte(PRIMITIVE_TYPE_BOOLEAN); out.writeBoolean((Boolean) value); } else if (type.equals(Byte.class)) { out.writeByte(PRIMITIVE_TYPE_BYTE); out.writeByte((Byte) value); } else if (type.equals(Short.class)) { out.writeByte(PRIMITIVE_TYPE_SHORT); 
out.writeShort((Short) value); } else if (type.equals(Integer.class)) { out.writeByte(PRIMITIVE_TYPE_INTEGER); out.writeInt((Integer) value); } else if (type.equals(Long.class)) { out.writeByte(PRIMITIVE_TYPE_LONG); out.writeLong((Long) value); } else if (type.equals(Float.class)) { out.writeByte(PRIMITIVE_TYPE_FLOAT); out.writeFloat((Float) value); } else if (type.equals(Double.class)) { out.writeByte(PRIMITIVE_TYPE_DOUBLE); out.writeDouble((Double) value); } else if (type.equals(String.class)) { out.writeByte(PRIMITIVE_TYPE_UTF); out.writeUTF((String) value); } else { throw new IllegalStateException("Illegal attribute type id found"); } } public static Object readAttributeValue(ObjectDataInput in) throws IOException { byte type = in.readByte(); switch (type) { case PRIMITIVE_TYPE_BOOLEAN: return in.readBoolean(); case PRIMITIVE_TYPE_BYTE: return in.readByte(); case PRIMITIVE_TYPE_SHORT: return in.readShort(); case PRIMITIVE_TYPE_INTEGER: return in.readInt(); case PRIMITIVE_TYPE_LONG: return in.readLong(); case PRIMITIVE_TYPE_FLOAT: return in.readFloat(); case PRIMITIVE_TYPE_DOUBLE: return in.readDouble(); case PRIMITIVE_TYPE_UTF: return in.readUTF(); default: throw new IllegalStateException("Illegal attribute type id found"); } } /** * Quietly attempts to close a {@link Closeable} resource, swallowing any exception. * @param closeable the resource to close. If {@code null}, no action is taken. */ public static void closeResource(Closeable closeable) { if (closeable == null) { return; } try { closeable.close(); } catch (IOException e) { Logger.getLogger(IOUtil.class).finest("closeResource failed", e); } } /** * Ensures that the file described by the supplied parameter does not exist * after the method returns. If the file didn't exist, returns silently. * If the file could not be deleted, fails with an exception. * If the file is a directory, its children are recursively deleted. */ public static void delete(File f) { if (!f.exists()) { return; } File[] subFiles = f.listFiles(); if (subFiles != null) { for (File sf : subFiles) { delete(sf); } } if (!f.delete()) { throw new HazelcastException("Failed to delete " + f); } } /** * Ensures that the file described by {@code fileNow} is renamed to file described by {@code fileToBe}. * First attempts to perform a direct, atomic rename; if that fails, checks whether the target exists, * deletes it, and retries. Throws an exception in each case where the rename failed. * * @param fileNow describes an existing file * @param fileToBe describes the desired pathname for the file */ public static void rename(File fileNow, File fileToBe) { if (fileNow.renameTo(fileToBe)) { return; } if (!fileNow.exists()) { throw new HazelcastException(String.format("Failed to rename %s to %s because %s doesn't exist.", fileNow, fileToBe, fileNow)); } if (!fileToBe.exists()) { throw new HazelcastException(String.format("Failed to rename %s to %s even though %s doesn't exist.", fileNow, fileToBe, fileToBe)); } if (!fileToBe.delete()) { throw new HazelcastException(String.format("Failed to rename %s to %s. 
%s exists and could not be deleted.", fileNow, fileToBe, fileToBe)); } if (!fileNow.renameTo(fileToBe)) { throw new HazelcastException(String.format("Failed to rename %s to %s even after deleting %s.", fileNow, fileToBe, fileToBe)); } } public static String toFileName(String name) { return name.replaceAll("[:\\\\/*\"?|<>',]", "_"); } private static final class ClassLoaderAwareObjectInputStream extends ObjectInputStream { private final ClassLoader classLoader; private ClassLoaderAwareObjectInputStream(final ClassLoader classLoader, final InputStream in) throws IOException { super(in); this.classLoader = classLoader; } protected Class<?> resolveClass(ObjectStreamClass desc) throws ClassNotFoundException { return ClassLoaderUtil.loadClass(classLoader, desc.getName()); } protected Class<?> resolveProxyClass(String[] interfaces) throws IOException, ClassNotFoundException { ClassLoader theClassLoader = getClassLoader(); if (theClassLoader == null) { return super.resolveProxyClass(interfaces); } ClassLoader nonPublicLoader = null; Class<?>[] classObjs = new Class<?>[interfaces.length]; for (int i = 0; i < interfaces.length; i++) { Class<?> cl = ClassLoaderUtil.loadClass(theClassLoader, interfaces[i]); if ((cl.getModifiers() & Modifier.PUBLIC) == 0) { if (nonPublicLoader != null) { if (nonPublicLoader != cl.getClassLoader()) { throw new IllegalAccessError("conflicting non-public interface class loaders"); } } else { nonPublicLoader = cl.getClassLoader(); } } classObjs[i] = cl; } try { return Proxy.getProxyClass(nonPublicLoader != null ? nonPublicLoader : theClassLoader, classObjs); } catch (IllegalArgumentException e) { throw new ClassNotFoundException(null, e); } } private ClassLoader getClassLoader() { ClassLoader theClassLoader = this.classLoader; if (theClassLoader == null) { theClassLoader = Thread.currentThread().getContextClassLoader(); } return theClassLoader; } } }
hazelcast/src/main/java/com/hazelcast/nio/IOUtil.java
/* * Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.nio; import com.hazelcast.core.HazelcastException; import com.hazelcast.internal.serialization.InternalSerializationService; import com.hazelcast.logging.Logger; import com.hazelcast.nio.serialization.Data; import com.hazelcast.spi.annotation.PrivateApi; import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectStreamClass; import java.io.OutputStream; import java.lang.reflect.Modifier; import java.lang.reflect.Proxy; import java.nio.ByteBuffer; import java.util.zip.DataFormatException; import java.util.zip.Deflater; import java.util.zip.Inflater; @PrivateApi public final class IOUtil { public static final byte PRIMITIVE_TYPE_BOOLEAN = 1; public static final byte PRIMITIVE_TYPE_BYTE = 2; public static final byte PRIMITIVE_TYPE_SHORT = 3; public static final byte PRIMITIVE_TYPE_INTEGER = 4; public static final byte PRIMITIVE_TYPE_LONG = 5; public static final byte PRIMITIVE_TYPE_FLOAT = 6; public static final byte PRIMITIVE_TYPE_DOUBLE = 7; public static final byte PRIMITIVE_TYPE_UTF = 8; private IOUtil() { } public static ByteBuffer newByteBuffer(int bufferSize, boolean direct) { if (direct) { return ByteBuffer.allocateDirect(bufferSize); } else { return ByteBuffer.allocate(bufferSize); } } /** * This method has a direct dependency on how objects are serialized in * {@link com.hazelcast.internal.serialization.impl.DataSerializableSerializer}. If the stream * format is changed, this extraction method must be changed as well. */ public static long extractOperationCallId(Data data, InternalSerializationService serializationService) throws IOException { ObjectDataInput input = serializationService.createObjectDataInput(data); boolean identified = input.readBoolean(); if (identified) { // read factoryId input.readInt(); // read typeId input.readInt(); } else { // read classname input.readUTF(); } // read callId return input.readLong(); } public static void writeByteArray(ObjectDataOutput out, byte[] value) throws IOException { int size = (value == null) ? 
0 : value.length; out.writeInt(size); if (size > 0) { out.write(value); } } public static byte[] readByteArray(ObjectDataInput in) throws IOException { int size = in.readInt(); if (size == 0) { return null; } else { byte[] b = new byte[size]; in.readFully(b); return b; } } public static void writeObject(ObjectDataOutput out, Object object) throws IOException { boolean isBinary = object instanceof Data; out.writeBoolean(isBinary); if (isBinary) { out.writeData((Data) object); } else { out.writeObject(object); } } @SuppressWarnings("unchecked") public static <T> T readObject(ObjectDataInput in) throws IOException { boolean isBinary = in.readBoolean(); if (isBinary) { return (T) in.readData(); } return in.readObject(); } public static ObjectInputStream newObjectInputStream(final ClassLoader classLoader, InputStream in) throws IOException { return new ClassLoaderAwareObjectInputStream(classLoader, in); } private static final class ClassLoaderAwareObjectInputStream extends ObjectInputStream { private final ClassLoader classLoader; private ClassLoaderAwareObjectInputStream(final ClassLoader classLoader, final InputStream in) throws IOException { super(in); this.classLoader = classLoader; } protected Class<?> resolveClass(ObjectStreamClass desc) throws ClassNotFoundException { return ClassLoaderUtil.loadClass(classLoader, desc.getName()); } protected Class<?> resolveProxyClass(String[] interfaces) throws IOException, ClassNotFoundException { ClassLoader theClassLoader = getClassLoader(); if (theClassLoader == null) { return super.resolveProxyClass(interfaces); } ClassLoader nonPublicLoader = null; Class<?>[] classObjs = new Class<?>[interfaces.length]; for (int i = 0; i < interfaces.length; i++) { Class<?> cl = ClassLoaderUtil.loadClass(theClassLoader, interfaces[i]); if ((cl.getModifiers() & Modifier.PUBLIC) == 0) { if (nonPublicLoader != null) { if (nonPublicLoader != cl.getClassLoader()) { throw new IllegalAccessError("conflicting non-public interface class loaders"); } } else { nonPublicLoader = cl.getClassLoader(); } } classObjs[i] = cl; } try { return Proxy.getProxyClass(nonPublicLoader != null ? 
nonPublicLoader : theClassLoader, classObjs); } catch (IllegalArgumentException e) { throw new ClassNotFoundException(null, e); } } private ClassLoader getClassLoader() { ClassLoader theClassLoader = this.classLoader; if (theClassLoader == null) { theClassLoader = Thread.currentThread().getContextClassLoader(); } return theClassLoader; } } public static OutputStream newOutputStream(final ByteBuffer dst) { return new OutputStream() { public void write(int b) throws IOException { dst.put((byte) b); } public void write(byte[] bytes, int off, int len) throws IOException { dst.put(bytes, off, len); } }; } public static InputStream newInputStream(final ByteBuffer src) { return new InputStream() { public int read() throws IOException { if (!src.hasRemaining()) { return -1; } return src.get() & 0xff; } public int read(byte[] bytes, int off, int len) throws IOException { if (!src.hasRemaining()) { return -1; } len = Math.min(len, src.remaining()); src.get(bytes, off, len); return len; } }; } public static int copyToHeapBuffer(ByteBuffer src, ByteBuffer dst) { if (src == null) { return 0; } int n = Math.min(src.remaining(), dst.remaining()); if (n > 0) { if (n < 16) { for (int i = 0; i < n; i++) { dst.put(src.get()); } } else { int srcPosition = src.position(); int destPosition = dst.position(); System.arraycopy(src.array(), srcPosition, dst.array(), destPosition, n); src.position(srcPosition + n); dst.position(destPosition + n); } } return n; } public static byte[] compress(byte[] input) throws IOException { if (input.length == 0) { return new byte[0]; } Deflater compressor = new Deflater(); compressor.setLevel(Deflater.BEST_SPEED); compressor.setInput(input); compressor.finish(); ByteArrayOutputStream bos = new ByteArrayOutputStream(input.length / 10); byte[] buf = new byte[input.length / 10]; while (!compressor.finished()) { int count = compressor.deflate(buf); bos.write(buf, 0, count); } bos.close(); compressor.end(); return bos.toByteArray(); } public static byte[] decompress(byte[] compressedData) throws IOException { if (compressedData.length == 0) { return compressedData; } Inflater inflater = new Inflater(); inflater.setInput(compressedData); ByteArrayOutputStream bos = new ByteArrayOutputStream(compressedData.length); byte[] buf = new byte[1024]; while (!inflater.finished()) { try { int count = inflater.inflate(buf); bos.write(buf, 0, count); } catch (DataFormatException e) { Logger.getLogger(IOUtil.class).finest("Decompression failed", e); } } bos.close(); inflater.end(); return bos.toByteArray(); } public static void writeAttributeValue(Object value, ObjectDataOutput out) throws IOException { Class<?> type = value.getClass(); if (type.equals(Boolean.class)) { out.writeByte(PRIMITIVE_TYPE_BOOLEAN); out.writeBoolean((Boolean) value); } else if (type.equals(Byte.class)) { out.writeByte(PRIMITIVE_TYPE_BYTE); out.writeByte((Byte) value); } else if (type.equals(Short.class)) { out.writeByte(PRIMITIVE_TYPE_SHORT); out.writeShort((Short) value); } else if (type.equals(Integer.class)) { out.writeByte(PRIMITIVE_TYPE_INTEGER); out.writeInt((Integer) value); } else if (type.equals(Long.class)) { out.writeByte(PRIMITIVE_TYPE_LONG); out.writeLong((Long) value); } else if (type.equals(Float.class)) { out.writeByte(PRIMITIVE_TYPE_FLOAT); out.writeFloat((Float) value); } else if (type.equals(Double.class)) { out.writeByte(PRIMITIVE_TYPE_DOUBLE); out.writeDouble((Double) value); } else if (type.equals(String.class)) { out.writeByte(PRIMITIVE_TYPE_UTF); out.writeUTF((String) value); } else { throw new 
IllegalStateException("Illegal attribute type id found"); } } public static Object readAttributeValue(ObjectDataInput in) throws IOException { byte type = in.readByte(); switch (type) { case PRIMITIVE_TYPE_BOOLEAN: return in.readBoolean(); case PRIMITIVE_TYPE_BYTE: return in.readByte(); case PRIMITIVE_TYPE_SHORT: return in.readShort(); case PRIMITIVE_TYPE_INTEGER: return in.readInt(); case PRIMITIVE_TYPE_LONG: return in.readLong(); case PRIMITIVE_TYPE_FLOAT: return in.readFloat(); case PRIMITIVE_TYPE_DOUBLE: return in.readDouble(); case PRIMITIVE_TYPE_UTF: return in.readUTF(); default: throw new IllegalStateException("Illegal attribute type id found"); } } /** * Quietly attempts to close a {@link Closeable} resource, swallowing any exception. * @param closeable the resource to close. If {@code null}, no action is taken. */ public static void closeResource(Closeable closeable) { if (closeable == null) { return; } try { closeable.close(); } catch (IOException e) { Logger.getLogger(IOUtil.class).finest("closeResource failed", e); } } /** * Ensures that the file described by the supplied parameter does not exist * after the method returns. If the file didn't exist, returns silently. * If the file could not be deleted, fails with an exception. * If the file is a directory, its children are recursively deleted. */ public static void delete(File f) { if (!f.exists()) { return; } File[] subFiles = f.listFiles(); if (subFiles != null) { for (File sf : subFiles) { delete(sf); } } if (!f.delete()) { throw new HazelcastException("Failed to delete " + f); } } public static String toFileName(String name) { return name.replaceAll("[:\\\\/*\"?|<>',]", "_"); } }
Add IOUtil.rename()
hazelcast/src/main/java/com/hazelcast/nio/IOUtil.java
Add IOUtil.rename()
<ide><path>azelcast/src/main/java/com/hazelcast/nio/IOUtil.java <ide> <ide> public static ObjectInputStream newObjectInputStream(final ClassLoader classLoader, InputStream in) throws IOException { <ide> return new ClassLoaderAwareObjectInputStream(classLoader, in); <del> } <del> <del> private static final class ClassLoaderAwareObjectInputStream extends ObjectInputStream { <del> <del> private final ClassLoader classLoader; <del> <del> private ClassLoaderAwareObjectInputStream(final ClassLoader classLoader, final InputStream in) throws IOException { <del> super(in); <del> this.classLoader = classLoader; <del> } <del> <del> protected Class<?> resolveClass(ObjectStreamClass desc) throws ClassNotFoundException { <del> return ClassLoaderUtil.loadClass(classLoader, desc.getName()); <del> } <del> <del> protected Class<?> resolveProxyClass(String[] interfaces) throws IOException, ClassNotFoundException { <del> ClassLoader theClassLoader = getClassLoader(); <del> if (theClassLoader == null) { <del> return super.resolveProxyClass(interfaces); <del> } <del> ClassLoader nonPublicLoader = null; <del> Class<?>[] classObjs = new Class<?>[interfaces.length]; <del> for (int i = 0; i < interfaces.length; i++) { <del> Class<?> cl = ClassLoaderUtil.loadClass(theClassLoader, interfaces[i]); <del> if ((cl.getModifiers() & Modifier.PUBLIC) == 0) { <del> if (nonPublicLoader != null) { <del> if (nonPublicLoader != cl.getClassLoader()) { <del> throw new IllegalAccessError("conflicting non-public interface class loaders"); <del> } <del> } else { <del> nonPublicLoader = cl.getClassLoader(); <del> } <del> } <del> classObjs[i] = cl; <del> } <del> try { <del> return Proxy.getProxyClass(nonPublicLoader != null ? nonPublicLoader : theClassLoader, classObjs); <del> } catch (IllegalArgumentException e) { <del> throw new ClassNotFoundException(null, e); <del> } <del> } <del> <del> private ClassLoader getClassLoader() { <del> ClassLoader theClassLoader = this.classLoader; <del> if (theClassLoader == null) { <del> theClassLoader = Thread.currentThread().getContextClassLoader(); <del> } <del> return theClassLoader; <del> } <del> <ide> } <ide> <ide> public static OutputStream newOutputStream(final ByteBuffer dst) { <ide> } <ide> } <ide> <add> /** <add> * Ensures that the file described by {@code fileNow} is renamed to file described by {@code fileToBe}. <add> * First attempts to perform a direct, atomic rename; if that fails, checks whether the target exists, <add> * deletes it, and retries. Throws an exception in each case where the rename failed. <add> * <add> * @param fileNow describes an existing file <add> * @param fileToBe describes the desired pathname for the file <add> */ <add> public static void rename(File fileNow, File fileToBe) { <add> if (fileNow.renameTo(fileToBe)) { <add> return; <add> } <add> if (!fileNow.exists()) { <add> throw new HazelcastException(String.format("Failed to rename %s to %s because %s doesn't exist.", <add> fileNow, fileToBe, fileNow)); <add> <add> } <add> if (!fileToBe.exists()) { <add> throw new HazelcastException(String.format("Failed to rename %s to %s even though %s doesn't exist.", <add> fileNow, fileToBe, fileToBe)); <add> <add> } <add> if (!fileToBe.delete()) { <add> throw new HazelcastException(String.format("Failed to rename %s to %s. 
%s exists and could not be deleted.", <add> fileNow, fileToBe, fileToBe)); <add> } <add> if (!fileNow.renameTo(fileToBe)) { <add> throw new HazelcastException(String.format("Failed to rename %s to %s even after deleting %s.", <add> fileNow, fileToBe, fileToBe)); <add> } <add> } <add> <ide> public static String toFileName(String name) { <ide> return name.replaceAll("[:\\\\/*\"?|<>',]", "_"); <ide> } <add> <add> <add> private static final class ClassLoaderAwareObjectInputStream extends ObjectInputStream { <add> <add> private final ClassLoader classLoader; <add> <add> private ClassLoaderAwareObjectInputStream(final ClassLoader classLoader, final InputStream in) throws IOException { <add> super(in); <add> this.classLoader = classLoader; <add> } <add> <add> protected Class<?> resolveClass(ObjectStreamClass desc) throws ClassNotFoundException { <add> return ClassLoaderUtil.loadClass(classLoader, desc.getName()); <add> } <add> <add> protected Class<?> resolveProxyClass(String[] interfaces) throws IOException, ClassNotFoundException { <add> ClassLoader theClassLoader = getClassLoader(); <add> if (theClassLoader == null) { <add> return super.resolveProxyClass(interfaces); <add> } <add> ClassLoader nonPublicLoader = null; <add> Class<?>[] classObjs = new Class<?>[interfaces.length]; <add> for (int i = 0; i < interfaces.length; i++) { <add> Class<?> cl = ClassLoaderUtil.loadClass(theClassLoader, interfaces[i]); <add> if ((cl.getModifiers() & Modifier.PUBLIC) == 0) { <add> if (nonPublicLoader != null) { <add> if (nonPublicLoader != cl.getClassLoader()) { <add> throw new IllegalAccessError("conflicting non-public interface class loaders"); <add> } <add> } else { <add> nonPublicLoader = cl.getClassLoader(); <add> } <add> } <add> classObjs[i] = cl; <add> } <add> try { <add> return Proxy.getProxyClass(nonPublicLoader != null ? nonPublicLoader : theClassLoader, classObjs); <add> } catch (IllegalArgumentException e) { <add> throw new ClassNotFoundException(null, e); <add> } <add> } <add> <add> private ClassLoader getClassLoader() { <add> ClassLoader theClassLoader = this.classLoader; <add> if (theClassLoader == null) { <add> theClassLoader = Thread.currentThread().getContextClassLoader(); <add> } <add> return theClassLoader; <add> } <add> <add> } <ide> }
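A minimal usage sketch of the rename-with-fallback pattern introduced by this commit (IOUtil.rename), written against java.io.File only; the class and method names here are illustrative and not Hazelcast's actual API, and this sketch throws IOException where the original throws HazelcastException.

import java.io.File;
import java.io.IOException;

public class RenameSketch {

    // Renames src to dst; if the direct rename fails, deletes an existing dst and retries once.
    static void renameOrThrow(File src, File dst) throws IOException {
        if (src.renameTo(dst)) {
            return;
        }
        if (!src.exists()) {
            throw new IOException("Cannot rename: " + src + " does not exist");
        }
        if (dst.exists() && !dst.delete()) {
            throw new IOException("Cannot rename: existing target " + dst + " could not be deleted");
        }
        if (!src.renameTo(dst)) {
            throw new IOException("Rename of " + src + " to " + dst + " failed even after deleting the target");
        }
    }

    public static void main(String[] args) throws IOException {
        File src = File.createTempFile("rename-sketch", ".tmp");
        File dst = new File(src.getParentFile(), "rename-sketch-renamed.tmp");
        renameOrThrow(src, dst);
        System.out.println("Renamed to " + dst + ": " + dst.exists());
        dst.delete(); // clean up the temporary file
    }
}

The delete-and-retry fallback matters because File.renameTo is not required to replace an existing target (and typically fails on Windows when the destination already exists), so a plain rename alone is not enough.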
Java
apache-2.0
4e49df8c7d07efdf54f90e8a94b1f25b3e072167
0
kuujo/onos,kuujo/onos,oplinkoms/onos,kuujo/onos,kuujo/onos,oplinkoms/onos,kuujo/onos,opennetworkinglab/onos,opennetworkinglab/onos,oplinkoms/onos,gkatsikas/onos,gkatsikas/onos,oplinkoms/onos,gkatsikas/onos,oplinkoms/onos,opennetworkinglab/onos,gkatsikas/onos,kuujo/onos,opennetworkinglab/onos,oplinkoms/onos,kuujo/onos,oplinkoms/onos,gkatsikas/onos,gkatsikas/onos,opennetworkinglab/onos,opennetworkinglab/onos
/* * Copyright 2015-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.segmentrouting; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Sets; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Modified; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.onlab.packet.Ethernet; import org.onlab.packet.ICMP6; import org.onlab.packet.IPv4; import org.onlab.packet.IPv6; import org.onlab.packet.IpAddress; import org.onlab.packet.IpPrefix; import org.onlab.packet.VlanId; import org.onlab.util.KryoNamespace; import org.onlab.util.Tools; import org.onosproject.cfg.ComponentConfigService; import org.onosproject.cluster.ClusterService; import org.onosproject.cluster.LeadershipService; import org.onosproject.core.ApplicationId; import org.onosproject.core.CoreService; import org.onosproject.event.Event; import org.onosproject.mastership.MastershipService; import org.onosproject.mcast.api.McastEvent; import org.onosproject.mcast.api.McastListener; import org.onosproject.mcast.api.MulticastRouteService; import org.onosproject.net.ConnectPoint; import org.onosproject.net.Device; import org.onosproject.net.DeviceId; import org.onosproject.net.Host; import org.onosproject.net.HostId; import org.onosproject.net.Link; import org.onosproject.net.Port; import org.onosproject.net.PortNumber; import org.onosproject.net.config.ConfigException; import org.onosproject.net.config.ConfigFactory; import org.onosproject.net.config.NetworkConfigEvent; import org.onosproject.net.config.NetworkConfigListener; import org.onosproject.net.config.NetworkConfigRegistry; import org.onosproject.net.config.basics.InterfaceConfig; import org.onosproject.net.config.basics.McastConfig; import org.onosproject.net.config.basics.SubjectFactories; import org.onosproject.net.device.DeviceAdminService; import org.onosproject.net.device.DeviceEvent; import org.onosproject.net.device.DeviceListener; import org.onosproject.net.device.DeviceService; import org.onosproject.net.flow.TrafficSelector; import org.onosproject.net.flow.TrafficTreatment; import org.onosproject.net.flowobjective.FlowObjectiveService; import org.onosproject.net.host.HostEvent; import org.onosproject.net.host.HostListener; import org.onosproject.net.host.HostLocationProbingService; import org.onosproject.net.host.HostService; import org.onosproject.net.host.InterfaceIpAddress; import org.onosproject.net.intf.Interface; import org.onosproject.net.intf.InterfaceService; import org.onosproject.net.link.LinkEvent; import org.onosproject.net.link.LinkListener; import 
org.onosproject.net.link.LinkService; import org.onosproject.net.neighbour.NeighbourResolutionService; import org.onosproject.net.packet.InboundPacket; import org.onosproject.net.packet.PacketContext; import org.onosproject.net.packet.PacketProcessor; import org.onosproject.net.packet.PacketService; import org.onosproject.net.topology.TopologyEvent; import org.onosproject.net.topology.TopologyListener; import org.onosproject.net.topology.TopologyService; import org.onosproject.routeservice.ResolvedRoute; import org.onosproject.routeservice.RouteEvent; import org.onosproject.routeservice.RouteListener; import org.onosproject.routeservice.RouteService; import org.onosproject.segmentrouting.config.DeviceConfigNotFoundException; import org.onosproject.segmentrouting.config.DeviceConfiguration; import org.onosproject.segmentrouting.config.SegmentRoutingAppConfig; import org.onosproject.segmentrouting.config.SegmentRoutingDeviceConfig; import org.onosproject.segmentrouting.config.XConnectConfig; import org.onosproject.segmentrouting.grouphandler.DefaultGroupHandler; import org.onosproject.segmentrouting.grouphandler.DestinationSet; import org.onosproject.segmentrouting.grouphandler.NextNeighbors; import org.onosproject.segmentrouting.mcast.McastHandler; import org.onosproject.segmentrouting.mcast.McastRole; import org.onosproject.segmentrouting.pwaas.DefaultL2Tunnel; import org.onosproject.segmentrouting.pwaas.DefaultL2TunnelDescription; import org.onosproject.segmentrouting.pwaas.DefaultL2TunnelHandler; import org.onosproject.segmentrouting.pwaas.DefaultL2TunnelPolicy; import org.onosproject.segmentrouting.pwaas.L2Tunnel; import org.onosproject.segmentrouting.pwaas.L2TunnelHandler; import org.onosproject.segmentrouting.pwaas.L2TunnelPolicy; import org.onosproject.segmentrouting.pwaas.L2TunnelDescription; import org.onosproject.segmentrouting.storekey.DestinationSetNextObjectiveStoreKey; import org.onosproject.segmentrouting.storekey.McastStoreKey; import org.onosproject.segmentrouting.storekey.PortNextObjectiveStoreKey; import org.onosproject.segmentrouting.storekey.VlanNextObjectiveStoreKey; import org.onosproject.segmentrouting.storekey.XConnectStoreKey; import org.onosproject.store.serializers.KryoNamespaces; import org.onosproject.store.service.EventuallyConsistentMap; import org.onosproject.store.service.EventuallyConsistentMapBuilder; import org.onosproject.store.service.StorageService; import org.onosproject.store.service.WallClockTimestamp; import org.osgi.service.component.ComponentContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Collections; import java.util.Dictionary; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import static com.google.common.base.Preconditions.checkState; import static org.onlab.packet.Ethernet.TYPE_ARP; import static org.onlab.util.Tools.groupedThreads; import static org.onosproject.net.config.NetworkConfigEvent.Type.CONFIG_REGISTERED; import static org.onosproject.net.config.NetworkConfigEvent.Type.CONFIG_UNREGISTERED; /** * Segment routing manager. 
*/ @Service @Component(immediate = true) public class SegmentRoutingManager implements SegmentRoutingService { private static Logger log = LoggerFactory.getLogger(SegmentRoutingManager.class); private static final String NOT_MASTER = "Current instance is not the master of {}. Ignore."; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) private ComponentConfigService compCfgService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) private NeighbourResolutionService neighbourResolutionService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public CoreService coreService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) PacketService packetService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) HostService hostService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) HostLocationProbingService probingService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public DeviceService deviceService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) DeviceAdminService deviceAdminService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public FlowObjectiveService flowObjectiveService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public LinkService linkService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public MastershipService mastershipService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public StorageService storageService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public MulticastRouteService multicastRouteService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public TopologyService topologyService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) RouteService routeService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public NetworkConfigRegistry cfgService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public InterfaceService interfaceService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public ClusterService clusterService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public LeadershipService leadershipService; @Property(name = "activeProbing", boolValue = true, label = "Enable active probing to discover dual-homed hosts.") boolean activeProbing = true; ArpHandler arpHandler = null; IcmpHandler icmpHandler = null; IpHandler ipHandler = null; RoutingRulePopulator routingRulePopulator = null; ApplicationId appId; DeviceConfiguration deviceConfiguration = null; DefaultRoutingHandler defaultRoutingHandler = null; private TunnelHandler tunnelHandler = null; private PolicyHandler policyHandler = null; private InternalPacketProcessor processor = null; private InternalLinkListener linkListener = null; private InternalDeviceListener deviceListener = null; private AppConfigHandler appCfgHandler = null; public XConnectHandler xConnectHandler = null; McastHandler mcastHandler = null; HostHandler hostHandler = null; private RouteHandler routeHandler = null; LinkHandler linkHandler = null; private SegmentRoutingNeighbourDispatcher neighbourHandler = null; private DefaultL2TunnelHandler l2TunnelHandler = null; private TopologyHandler topologyHandler = null; private final InternalHostListener hostListener = new InternalHostListener(); private final InternalConfigListener cfgListener = new InternalConfigListener(this); private final InternalMcastListener mcastListener = new InternalMcastListener(); private 
final InternalRouteEventListener routeListener = new InternalRouteEventListener(); private final InternalTopologyListener topologyListener = new InternalTopologyListener(); // Handles device, link, topology and network config events private ScheduledExecutorService mainEventExecutor = Executors .newScheduledThreadPool(1, groupedThreads("sr-event-main", "%d", log)); // Handles host, route, mcast events private ScheduledExecutorService hostEventExecutor = Executors .newScheduledThreadPool(1, groupedThreads("sr-event-host", "%d", log)); private ScheduledExecutorService routeEventExecutor = Executors .newScheduledThreadPool(1, groupedThreads("sr-event-route", "%d", log)); private ScheduledExecutorService mcastEventExecutor = Executors .newScheduledThreadPool(1, groupedThreads("sr-event-mcast", "%d", log)); Map<DeviceId, DefaultGroupHandler> groupHandlerMap = new ConcurrentHashMap<>(); /** * Per device next objective ID store with (device id + destination set) as key. * Used to keep track on MPLS group information. */ private EventuallyConsistentMap<DestinationSetNextObjectiveStoreKey, NextNeighbors> dsNextObjStore = null; /** * Per device next objective ID store with (device id + vlanid) as key. * Used to keep track on L2 flood group information. */ private EventuallyConsistentMap<VlanNextObjectiveStoreKey, Integer> vlanNextObjStore = null; /** * Per device next objective ID store with (device id + port + treatment + meta) as key. * Used to keep track on L2 interface group and L3 unicast group information. */ private EventuallyConsistentMap<PortNextObjectiveStoreKey, Integer> portNextObjStore = null; private EventuallyConsistentMap<String, Tunnel> tunnelStore = null; private EventuallyConsistentMap<String, Policy> policyStore = null; private AtomicBoolean programmingScheduled = new AtomicBoolean(); private final ConfigFactory<DeviceId, SegmentRoutingDeviceConfig> deviceConfigFactory = new ConfigFactory<DeviceId, SegmentRoutingDeviceConfig>( SubjectFactories.DEVICE_SUBJECT_FACTORY, SegmentRoutingDeviceConfig.class, "segmentrouting") { @Override public SegmentRoutingDeviceConfig createConfig() { return new SegmentRoutingDeviceConfig(); } }; private final ConfigFactory<ApplicationId, SegmentRoutingAppConfig> appConfigFactory = new ConfigFactory<ApplicationId, SegmentRoutingAppConfig>( SubjectFactories.APP_SUBJECT_FACTORY, SegmentRoutingAppConfig.class, "segmentrouting") { @Override public SegmentRoutingAppConfig createConfig() { return new SegmentRoutingAppConfig(); } }; private final ConfigFactory<ApplicationId, XConnectConfig> xConnectConfigFactory = new ConfigFactory<ApplicationId, XConnectConfig>( SubjectFactories.APP_SUBJECT_FACTORY, XConnectConfig.class, "xconnect") { @Override public XConnectConfig createConfig() { return new XConnectConfig(); } }; private ConfigFactory<ApplicationId, McastConfig> mcastConfigFactory = new ConfigFactory<ApplicationId, McastConfig>( SubjectFactories.APP_SUBJECT_FACTORY, McastConfig.class, "multicast") { @Override public McastConfig createConfig() { return new McastConfig(); } }; private static final Object THREAD_SCHED_LOCK = new Object(); private static int numOfEventsQueued = 0; private static int numOfEventsExecuted = 0; private static int numOfHandlerExecution = 0; private static int numOfHandlerScheduled = 0; /** * Segment Routing App ID. */ public static final String APP_NAME = "org.onosproject.segmentrouting"; /** * The default VLAN ID assigned to the interfaces without subnet config. 
*/ public static final VlanId INTERNAL_VLAN = VlanId.vlanId((short) 4094); @Activate protected void activate(ComponentContext context) { appId = coreService.registerApplication(APP_NAME); log.debug("Creating EC map nsnextobjectivestore"); EventuallyConsistentMapBuilder<DestinationSetNextObjectiveStoreKey, NextNeighbors> nsNextObjMapBuilder = storageService.eventuallyConsistentMapBuilder(); dsNextObjStore = nsNextObjMapBuilder .withName("nsnextobjectivestore") .withSerializer(createSerializer()) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); log.trace("Current size {}", dsNextObjStore.size()); log.debug("Creating EC map vlannextobjectivestore"); EventuallyConsistentMapBuilder<VlanNextObjectiveStoreKey, Integer> vlanNextObjMapBuilder = storageService.eventuallyConsistentMapBuilder(); vlanNextObjStore = vlanNextObjMapBuilder .withName("vlannextobjectivestore") .withSerializer(createSerializer()) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); log.debug("Creating EC map subnetnextobjectivestore"); EventuallyConsistentMapBuilder<PortNextObjectiveStoreKey, Integer> portNextObjMapBuilder = storageService.eventuallyConsistentMapBuilder(); portNextObjStore = portNextObjMapBuilder .withName("portnextobjectivestore") .withSerializer(createSerializer()) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); EventuallyConsistentMapBuilder<String, Tunnel> tunnelMapBuilder = storageService.eventuallyConsistentMapBuilder(); tunnelStore = tunnelMapBuilder .withName("tunnelstore") .withSerializer(createSerializer()) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); EventuallyConsistentMapBuilder<String, Policy> policyMapBuilder = storageService.eventuallyConsistentMapBuilder(); policyStore = policyMapBuilder .withName("policystore") .withSerializer(createSerializer()) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); compCfgService.preSetProperty("org.onosproject.net.group.impl.GroupManager", "purgeOnDisconnection", "true"); compCfgService.preSetProperty("org.onosproject.net.flow.impl.FlowRuleManager", "purgeOnDisconnection", "true"); compCfgService.preSetProperty("org.onosproject.provider.host.impl.HostLocationProvider", "requestInterceptsEnabled", "false"); compCfgService.preSetProperty("org.onosproject.net.neighbour.impl.NeighbourResolutionManager", "requestInterceptsEnabled", "false"); compCfgService.preSetProperty("org.onosproject.dhcprelay.DhcpRelayManager", "arpEnabled", "false"); compCfgService.preSetProperty("org.onosproject.net.host.impl.HostManager", "greedyLearningIpv6", "true"); compCfgService.preSetProperty("org.onosproject.routing.cpr.ControlPlaneRedirectManager", "forceUnprovision", "true"); compCfgService.preSetProperty("org.onosproject.routeservice.store.RouteStoreImpl", "distributed", "true"); compCfgService.preSetProperty("org.onosproject.provider.host.impl.HostLocationProvider", "multihomingEnabled", "true"); compCfgService.preSetProperty("org.onosproject.provider.lldp.impl.LldpLinkProvider", "staleLinkAge", "15000"); compCfgService.preSetProperty("org.onosproject.net.host.impl.HostManager", "allowDuplicateIps", "false"); compCfgService.registerProperties(getClass()); modified(context); processor = new InternalPacketProcessor(); linkListener = new InternalLinkListener(); deviceListener = new InternalDeviceListener(); appCfgHandler = new AppConfigHandler(this); xConnectHandler = new XConnectHandler(this); mcastHandler = new McastHandler(this); hostHandler = new HostHandler(this); linkHandler = 
new LinkHandler(this); routeHandler = new RouteHandler(this); neighbourHandler = new SegmentRoutingNeighbourDispatcher(this); l2TunnelHandler = new DefaultL2TunnelHandler(this); topologyHandler = new TopologyHandler(this); cfgService.addListener(cfgListener); cfgService.registerConfigFactory(deviceConfigFactory); cfgService.registerConfigFactory(appConfigFactory); cfgService.registerConfigFactory(xConnectConfigFactory); cfgService.registerConfigFactory(mcastConfigFactory); log.info("Configuring network before adding listeners"); cfgListener.configureNetwork(); hostService.addListener(hostListener); packetService.addProcessor(processor, PacketProcessor.director(2)); linkService.addListener(linkListener); deviceService.addListener(deviceListener); multicastRouteService.addListener(mcastListener); routeService.addListener(routeListener); topologyService.addListener(topologyListener); linkHandler.init(); l2TunnelHandler.init(); log.info("Started"); } KryoNamespace.Builder createSerializer() { return new KryoNamespace.Builder() .register(KryoNamespaces.API) .register(DestinationSetNextObjectiveStoreKey.class, VlanNextObjectiveStoreKey.class, DestinationSet.class, NextNeighbors.class, Tunnel.class, DefaultTunnel.class, Policy.class, TunnelPolicy.class, Policy.Type.class, PortNextObjectiveStoreKey.class, XConnectStoreKey.class, L2Tunnel.class, L2TunnelPolicy.class, DefaultL2Tunnel.class, DefaultL2TunnelPolicy.class ); } @Deactivate protected void deactivate() { cfgService.removeListener(cfgListener); cfgService.unregisterConfigFactory(deviceConfigFactory); cfgService.unregisterConfigFactory(appConfigFactory); cfgService.unregisterConfigFactory(xConnectConfigFactory); cfgService.unregisterConfigFactory(mcastConfigFactory); compCfgService.unregisterProperties(getClass(), false); hostService.removeListener(hostListener); packetService.removeProcessor(processor); linkService.removeListener(linkListener); deviceService.removeListener(deviceListener); multicastRouteService.removeListener(mcastListener); routeService.removeListener(routeListener); topologyService.removeListener(topologyListener); neighbourResolutionService.unregisterNeighbourHandlers(appId); processor = null; linkListener = null; deviceListener = null; groupHandlerMap.clear(); dsNextObjStore.destroy(); vlanNextObjStore.destroy(); portNextObjStore.destroy(); tunnelStore.destroy(); policyStore.destroy(); mcastHandler.terminate(); log.info("Stopped"); } @Modified private void modified(ComponentContext context) { Dictionary<?, ?> properties = context.getProperties(); if (properties == null) { return; } String strActiveProving = Tools.get(properties, "activeProbing"); boolean expectActiveProbing = Boolean.parseBoolean(strActiveProving); if (expectActiveProbing != activeProbing) { activeProbing = expectActiveProbing; log.info("{} active probing", activeProbing ? 
"Enabling" : "Disabling"); } } @Override public List<Tunnel> getTunnels() { return tunnelHandler.getTunnels(); } @Override public TunnelHandler.Result createTunnel(Tunnel tunnel) { return tunnelHandler.createTunnel(tunnel); } @Override public TunnelHandler.Result removeTunnel(Tunnel tunnel) { for (Policy policy: policyHandler.getPolicies()) { if (policy.type() == Policy.Type.TUNNEL_FLOW) { TunnelPolicy tunnelPolicy = (TunnelPolicy) policy; if (tunnelPolicy.tunnelId().equals(tunnel.id())) { log.warn("Cannot remove the tunnel used by a policy"); return TunnelHandler.Result.TUNNEL_IN_USE; } } } return tunnelHandler.removeTunnel(tunnel); } @Override public PolicyHandler.Result removePolicy(Policy policy) { return policyHandler.removePolicy(policy); } @Override public PolicyHandler.Result createPolicy(Policy policy) { return policyHandler.createPolicy(policy); } @Override public List<Policy> getPolicies() { return policyHandler.getPolicies(); } @Override public Set<L2TunnelDescription> getL2TunnelDescriptions(boolean pending) { return l2TunnelHandler.getL2Descriptions(pending); } @Override public List<L2Tunnel> getL2Tunnels() { return l2TunnelHandler.getL2Tunnels(); } @Override public List<L2TunnelPolicy> getL2Policies() { return l2TunnelHandler.getL2Policies(); } @Override @Deprecated public L2TunnelHandler.Result addPseudowiresBulk(List<DefaultL2TunnelDescription> bulkPseudowires) { // get both added and pending pseudowires List<L2TunnelDescription> pseudowires = new ArrayList<>(); pseudowires.addAll(l2TunnelHandler.getL2Descriptions(false)); pseudowires.addAll(l2TunnelHandler.getL2Descriptions(true)); pseudowires.addAll(bulkPseudowires); Set<L2TunnelDescription> newPseudowires = new HashSet(bulkPseudowires); L2TunnelHandler.Result retRes = L2TunnelHandler.Result.SUCCESS; L2TunnelHandler.Result res; for (DefaultL2TunnelDescription pw : bulkPseudowires) { res = addPseudowire(pw); if (res != L2TunnelHandler.Result.SUCCESS) { log.error("Pseudowire with id {} can not be instantiated !", res); retRes = res; } } return retRes; } @Override public L2TunnelHandler.Result addPseudowire(L2TunnelDescription l2TunnelDescription) { return l2TunnelHandler.deployPseudowire(l2TunnelDescription); } @Override public L2TunnelHandler.Result removePseudowire(Integer pwId) { return l2TunnelHandler.tearDownPseudowire(pwId); } @Override public void rerouteNetwork() { cfgListener.configureNetwork(); } @Override public Map<DeviceId, Set<IpPrefix>> getDeviceSubnetMap() { Map<DeviceId, Set<IpPrefix>> deviceSubnetMap = Maps.newHashMap(); deviceConfiguration.getRouters().forEach(device -> deviceSubnetMap.put(device, deviceConfiguration.getSubnets(device))); return deviceSubnetMap; } @Override public ImmutableMap<DeviceId, EcmpShortestPathGraph> getCurrentEcmpSpg() { if (defaultRoutingHandler != null) { return defaultRoutingHandler.getCurrentEmcpSpgMap(); } else { return null; } } @Override public ImmutableMap<DestinationSetNextObjectiveStoreKey, NextNeighbors> getDestinationSet() { if (dsNextObjStore != null) { return ImmutableMap.copyOf(dsNextObjStore.entrySet()); } else { return ImmutableMap.of(); } } @Override public void verifyGroups(DeviceId id) { DefaultGroupHandler gh = groupHandlerMap.get(id); if (gh != null) { gh.triggerBucketCorrector(); } } @Override public ImmutableMap<Link, Boolean> getSeenLinks() { return linkHandler.getSeenLinks(); } @Override public ImmutableMap<DeviceId, Set<PortNumber>> getDownedPortState() { return linkHandler.getDownedPorts(); } @Override public Map<McastStoreKey, Integer> 
getMcastNextIds(IpAddress mcastIp) { return mcastHandler.getMcastNextIds(mcastIp); } @Override public Map<McastStoreKey, McastRole> getMcastRoles(IpAddress mcastIp) { return mcastHandler.getMcastRoles(mcastIp); } @Override public Map<ConnectPoint, List<ConnectPoint>> getMcastPaths(IpAddress mcastIp) { return mcastHandler.getMcastPaths(mcastIp); } /** * Extracts the application ID from the manager. * * @return application ID */ public ApplicationId appId() { return appId; } /** * Returns the device configuration. * * @return device configuration */ public DeviceConfiguration deviceConfiguration() { return deviceConfiguration; } /** * Per device next objective ID store with (device id + destination set) as key. * Used to keep track on MPLS group information. * * @return next objective ID store */ public EventuallyConsistentMap<DestinationSetNextObjectiveStoreKey, NextNeighbors> dsNextObjStore() { return dsNextObjStore; } /** * Per device next objective ID store with (device id + vlanid) as key. * Used to keep track on L2 flood group information. * * @return vlan next object store */ public EventuallyConsistentMap<VlanNextObjectiveStoreKey, Integer> vlanNextObjStore() { return vlanNextObjStore; } /** * Per device next objective ID store with (device id + port + treatment + meta) as key. * Used to keep track on L2 interface group and L3 unicast group information. * * @return port next object store. */ public EventuallyConsistentMap<PortNextObjectiveStoreKey, Integer> portNextObjStore() { return portNextObjStore; } /** * Returns the MPLS-ECMP configuration which indicates whether ECMP on * labeled packets should be programmed or not. * * @return MPLS-ECMP value */ public boolean getMplsEcmp() { SegmentRoutingAppConfig segmentRoutingAppConfig = cfgService .getConfig(this.appId, SegmentRoutingAppConfig.class); return segmentRoutingAppConfig != null && segmentRoutingAppConfig.mplsEcmp(); } /** * Returns the tunnel object with the tunnel ID. * * @param tunnelId Tunnel ID * @return Tunnel reference */ public Tunnel getTunnel(String tunnelId) { return tunnelHandler.getTunnel(tunnelId); } // TODO Consider moving these to InterfaceService /** * Returns untagged VLAN configured on given connect point. * <p> * Only returns the first match if there are multiple untagged VLAN configured * on the connect point. * * @param connectPoint connect point * @return untagged VLAN or null if not configured */ VlanId getUntaggedVlanId(ConnectPoint connectPoint) { return interfaceService.getInterfacesByPort(connectPoint).stream() .filter(intf -> !intf.vlanUntagged().equals(VlanId.NONE)) .map(Interface::vlanUntagged) .findFirst().orElse(null); } /** * Returns tagged VLAN configured on given connect point. * <p> * Returns all matches if there are multiple tagged VLAN configured * on the connect point. * * @param connectPoint connect point * @return tagged VLAN or empty set if not configured */ Set<VlanId> getTaggedVlanId(ConnectPoint connectPoint) { Set<Interface> interfaces = interfaceService.getInterfacesByPort(connectPoint); return interfaces.stream() .map(Interface::vlanTagged) .flatMap(Set::stream) .collect(Collectors.toSet()); } /** * Returns native VLAN configured on given connect point. * <p> * Only returns the first match if there are multiple native VLAN configured * on the connect point. 
* * @param connectPoint connect point * @return native VLAN or null if not configured */ VlanId getNativeVlanId(ConnectPoint connectPoint) { Set<Interface> interfaces = interfaceService.getInterfacesByPort(connectPoint); return interfaces.stream() .filter(intf -> !intf.vlanNative().equals(VlanId.NONE)) .map(Interface::vlanNative) .findFirst() .orElse(null); } /** * Returns internal VLAN for untagged hosts on given connect point. * <p> * The internal VLAN is either vlan-untagged for an access port, * or vlan-native for a trunk port. * * @param connectPoint connect point * @return internal VLAN or null if both vlan-untagged and vlan-native are undefined */ public VlanId getInternalVlanId(ConnectPoint connectPoint) { VlanId untaggedVlanId = getUntaggedVlanId(connectPoint); VlanId nativeVlanId = getNativeVlanId(connectPoint); return untaggedVlanId != null ? untaggedVlanId : nativeVlanId; } /** * Returns optional pair device ID of given device. * * @param deviceId device ID * @return optional pair device ID. Might be empty if pair device is not configured */ Optional<DeviceId> getPairDeviceId(DeviceId deviceId) { SegmentRoutingDeviceConfig deviceConfig = cfgService.getConfig(deviceId, SegmentRoutingDeviceConfig.class); return Optional.ofNullable(deviceConfig).map(SegmentRoutingDeviceConfig::pairDeviceId); } /** * Returns optional pair device local port of given device. * * @param deviceId device ID * @return optional pair device ID. Might be empty if pair device is not configured */ Optional<PortNumber> getPairLocalPorts(DeviceId deviceId) { SegmentRoutingDeviceConfig deviceConfig = cfgService.getConfig(deviceId, SegmentRoutingDeviceConfig.class); return Optional.ofNullable(deviceConfig).map(SegmentRoutingDeviceConfig::pairLocalPort); } /** * Determine if current instance is the master of given connect point. * * @param cp connect point * @return true if current instance is the master of given connect point */ public boolean isMasterOf(ConnectPoint cp) { boolean isMaster = mastershipService.isLocalMaster(cp.deviceId()); if (!isMaster) { log.debug(NOT_MASTER, cp); } return isMaster; } /** * Returns locations of given resolved route. * * @param resolvedRoute resolved route * @return locations of nexthop. Might be empty if next hop is not found */ Set<ConnectPoint> nextHopLocations(ResolvedRoute resolvedRoute) { HostId hostId = HostId.hostId(resolvedRoute.nextHopMac(), resolvedRoute.nextHopVlan()); return Optional.ofNullable(hostService.getHost(hostId)) .map(Host::locations).orElse(Sets.newHashSet()) .stream().map(l -> (ConnectPoint) l).collect(Collectors.toSet()); } /** * Returns vlan port map of given device. * * @param deviceId device id * @return vlan-port multimap */ public Multimap<VlanId, PortNumber> getVlanPortMap(DeviceId deviceId) { HashMultimap<VlanId, PortNumber> vlanPortMap = HashMultimap.create(); interfaceService.getInterfaces().stream() .filter(intf -> intf.connectPoint().deviceId().equals(deviceId)) .forEach(intf -> { vlanPortMap.put(intf.vlanUntagged(), intf.connectPoint().port()); intf.vlanTagged().forEach(vlanTagged -> vlanPortMap.put(vlanTagged, intf.connectPoint().port()) ); vlanPortMap.put(intf.vlanNative(), intf.connectPoint().port()); }); vlanPortMap.removeAll(VlanId.NONE); return vlanPortMap; } /** * Returns the next objective ID for the given vlan id. It is expected * that the next-objective has been pre-created from configuration. 
* * @param deviceId Device ID * @param vlanId VLAN ID * @return next objective ID or -1 if it was not found */ int getVlanNextObjectiveId(DeviceId deviceId, VlanId vlanId) { if (groupHandlerMap.get(deviceId) != null) { log.trace("getVlanNextObjectiveId query in device {}", deviceId); return groupHandlerMap.get(deviceId).getVlanNextObjectiveId(vlanId); } else { log.warn("getVlanNextObjectiveId query - groupHandler for " + "device {} not found", deviceId); return -1; } } /** * Returns the next objective ID for the given portNumber, given the treatment. * There could be multiple different treatments to the same outport, which * would result in different objectives. If the next object does not exist, * and should be created, a new one is created and its id is returned. * * @param deviceId Device ID * @param portNum port number on device for which NextObjective is queried * @param treatment the actions to apply on the packets (should include outport) * @param meta metadata passed into the creation of a Next Objective if necessary * @param createIfMissing true if a next object should be created if not found * @return next objective ID or -1 if an error occurred during retrieval or creation */ public int getPortNextObjectiveId(DeviceId deviceId, PortNumber portNum, TrafficTreatment treatment, TrafficSelector meta, boolean createIfMissing) { DefaultGroupHandler ghdlr = groupHandlerMap.get(deviceId); if (ghdlr != null) { return ghdlr.getPortNextObjectiveId(portNum, treatment, meta, createIfMissing); } else { log.warn("getPortNextObjectiveId query - groupHandler for device {}" + " not found", deviceId); return -1; } } /** * Returns the group handler object for the specified device id. * * @param devId the device identifier * @return the groupHandler object for the device id, or null if not found */ DefaultGroupHandler getGroupHandler(DeviceId devId) { return groupHandlerMap.get(devId); } /** * Returns the default routing handler object. * * @return the default routing handler object */ public DefaultRoutingHandler getRoutingHandler() { return defaultRoutingHandler; } private class InternalPacketProcessor implements PacketProcessor { @Override public void process(PacketContext context) { if (context.isHandled()) { return; } InboundPacket pkt = context.inPacket(); Ethernet ethernet = pkt.parsed(); if (ethernet == null) { return; } log.trace("Rcvd pktin from {}: {}", context.inPacket().receivedFrom(), ethernet); if (ethernet.getEtherType() == TYPE_ARP) { log.warn("Received unexpected ARP packet on {}", context.inPacket().receivedFrom()); log.trace("{}", ethernet); return; } else if (ethernet.getEtherType() == Ethernet.TYPE_IPV4) { IPv4 ipv4Packet = (IPv4) ethernet.getPayload(); //ipHandler.addToPacketBuffer(ipv4Packet); if (ipv4Packet.getProtocol() == IPv4.PROTOCOL_ICMP) { icmpHandler.processIcmp(ethernet, pkt.receivedFrom()); } else { // NOTE: We don't support IP learning at this moment so this // is not necessary. Also it causes duplication of DHCP packets. 
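// Non-ICMP IPv4 packets are therefore left untouched by this processor.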
// ipHandler.processPacketIn(ipv4Packet, pkt.receivedFrom()); } } else if (ethernet.getEtherType() == Ethernet.TYPE_IPV6) { IPv6 ipv6Packet = (IPv6) ethernet.getPayload(); //ipHandler.addToPacketBuffer(ipv6Packet); // We deal with the packet only if the packet is a ICMP6 ECHO/REPLY if (ipv6Packet.getNextHeader() == IPv6.PROTOCOL_ICMP6) { ICMP6 icmp6Packet = (ICMP6) ipv6Packet.getPayload(); if (icmp6Packet.getIcmpType() == ICMP6.ECHO_REQUEST || icmp6Packet.getIcmpType() == ICMP6.ECHO_REPLY) { icmpHandler.processIcmpv6(ethernet, pkt.receivedFrom()); } else { log.trace("Received ICMPv6 0x{} - not handled", Integer.toHexString(icmp6Packet.getIcmpType() & 0xff)); } } else { // NOTE: We don't support IP learning at this moment so this // is not necessary. Also it causes duplication of DHCPv6 packets. // ipHandler.processPacketIn(ipv6Packet, pkt.receivedFrom()); } } } } private class InternalEventHandler implements Runnable { private Event event; InternalEventHandler(Event event) { this.event = event; } @Override public void run() { try { // TODO We should also change SR routing and PW to listen to TopologyEvents if (event.type() == LinkEvent.Type.LINK_ADDED || event.type() == LinkEvent.Type.LINK_UPDATED) { linkHandler.processLinkAdded((Link) event.subject()); } else if (event.type() == LinkEvent.Type.LINK_REMOVED) { linkHandler.processLinkRemoved((Link) event.subject()); } else if (event.type() == DeviceEvent.Type.DEVICE_ADDED || event.type() == DeviceEvent.Type.DEVICE_AVAILABILITY_CHANGED || event.type() == DeviceEvent.Type.DEVICE_UPDATED) { DeviceId deviceId = ((Device) event.subject()).id(); if (deviceService.isAvailable(deviceId)) { log.info("** DEVICE UP Processing device event {} " + "for available device {}", event.type(), ((Device) event.subject()).id()); processDeviceAdded((Device) event.subject()); } else { log.info(" ** DEVICE DOWN Processing device event {}" + " for unavailable device {}", event.type(), ((Device) event.subject()).id()); processDeviceRemoved((Device) event.subject()); } } else if (event.type() == DeviceEvent.Type.PORT_ADDED) { // typically these calls come when device is added first time // so port filtering rules are handled at the device_added event. // port added calls represent all ports on the device, // enabled or not. 
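// Hence no programming is performed here beyond tracing the event.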
log.trace("** PORT ADDED {}/{} -> {}", ((DeviceEvent) event).subject().id(), ((DeviceEvent) event).port().number(), event.type()); } else if (event.type() == DeviceEvent.Type.PORT_UPDATED) { // these calls happen for every subsequent event // ports enabled, disabled, switch goes away, comes back log.info("** PORT UPDATED {}/{} -> {}", event.subject(), ((DeviceEvent) event).port(), event.type()); processPortUpdated(((Device) event.subject()), ((DeviceEvent) event).port()); } else if (event.type() == TopologyEvent.Type.TOPOLOGY_CHANGED) { // Process topology event, needed for all modules relying on // topology service for path computation TopologyEvent topologyEvent = (TopologyEvent) event; log.info("Processing topology event {}, topology age {}, reasons {}", event.type(), topologyEvent.subject().time(), topologyEvent.reasons().size()); topologyHandler.processTopologyChange(topologyEvent.reasons()); } else if (event.type() == HostEvent.Type.HOST_ADDED) { hostHandler.processHostAddedEvent((HostEvent) event); } else if (event.type() == HostEvent.Type.HOST_MOVED) { hostHandler.processHostMovedEvent((HostEvent) event); routeHandler.processHostMovedEvent((HostEvent) event); } else if (event.type() == HostEvent.Type.HOST_REMOVED) { hostHandler.processHostRemovedEvent((HostEvent) event); } else if (event.type() == HostEvent.Type.HOST_UPDATED) { hostHandler.processHostUpdatedEvent((HostEvent) event); } else if (event.type() == RouteEvent.Type.ROUTE_ADDED) { routeHandler.processRouteAdded((RouteEvent) event); } else if (event.type() == RouteEvent.Type.ROUTE_UPDATED) { routeHandler.processRouteUpdated((RouteEvent) event); } else if (event.type() == RouteEvent.Type.ROUTE_REMOVED) { routeHandler.processRouteRemoved((RouteEvent) event); } else if (event.type() == RouteEvent.Type.ALTERNATIVE_ROUTES_CHANGED) { routeHandler.processAlternativeRoutesChanged((RouteEvent) event); } else if (event.type() == McastEvent.Type.SOURCES_ADDED || event.type() == McastEvent.Type.SOURCES_REMOVED || event.type() == McastEvent.Type.SINKS_ADDED || event.type() == McastEvent.Type.SINKS_REMOVED || event.type() == McastEvent.Type.ROUTE_ADDED || event.type() == McastEvent.Type.ROUTE_REMOVED) { mcastHandler.processMcastEvent((McastEvent) event); } else if (event.type() == NetworkConfigEvent.Type.CONFIG_ADDED) { NetworkConfigEvent netcfgEvent = (NetworkConfigEvent) event; Class configClass = netcfgEvent.configClass(); if (configClass.equals(SegmentRoutingAppConfig.class)) { appCfgHandler.processAppConfigAdded(netcfgEvent); log.info("App config event .. configuring network"); cfgListener.configureNetwork(); } else if (configClass.equals(SegmentRoutingDeviceConfig.class)) { log.info("Segment Routing Device Config added for {}", event.subject()); cfgListener.configureNetwork(); } else if (configClass.equals(XConnectConfig.class)) { xConnectHandler.processXConnectConfigAdded(netcfgEvent); } else if (configClass.equals(InterfaceConfig.class)) { log.info("Interface Config added for {}", event.subject()); cfgListener.configureNetwork(); } else { log.error("Unhandled config class: {}", configClass); } } else if (event.type() == NetworkConfigEvent.Type.CONFIG_UPDATED) { NetworkConfigEvent netcfgEvent = (NetworkConfigEvent) event; Class configClass = netcfgEvent.configClass(); if (configClass.equals(SegmentRoutingAppConfig.class)) { appCfgHandler.processAppConfigUpdated(netcfgEvent); log.info("App config event .. 
configuring network"); cfgListener.configureNetwork(); } else if (configClass.equals(SegmentRoutingDeviceConfig.class)) { log.info("Segment Routing Device Config updated for {}", event.subject()); createOrUpdateDeviceConfiguration(); } else if (configClass.equals(XConnectConfig.class)) { xConnectHandler.processXConnectConfigUpdated(netcfgEvent); } else if (configClass.equals(InterfaceConfig.class)) { log.info("Interface Config updated for {}", event.subject()); createOrUpdateDeviceConfiguration(); updateInterface((InterfaceConfig) netcfgEvent.config().get(), (InterfaceConfig) netcfgEvent.prevConfig().get()); } else { log.error("Unhandled config class: {}", configClass); } } else if (event.type() == NetworkConfigEvent.Type.CONFIG_REMOVED) { NetworkConfigEvent netcfgEvent = (NetworkConfigEvent) event; Class configClass = netcfgEvent.configClass(); if (configClass.equals(SegmentRoutingAppConfig.class)) { appCfgHandler.processAppConfigRemoved(netcfgEvent); log.info("App config event .. configuring network"); cfgListener.configureNetwork(); } else if (configClass.equals(SegmentRoutingDeviceConfig.class)) { // TODO Handle sr device config removal log.info("SegmentRoutingDeviceConfig removal is not handled in current implementation"); } else if (configClass.equals(XConnectConfig.class)) { xConnectHandler.processXConnectConfigRemoved(netcfgEvent); } else if (configClass.equals(InterfaceConfig.class)) { // TODO Handle interface removal log.info("InterfaceConfig removal is not handled in current implementation"); } else { log.error("Unhandled config class: {}", configClass); } } else { log.warn("Unhandled event type: {}", event.type()); } } catch (Exception e) { log.error("SegmentRouting event handler thread thrown an exception: {}", e.getMessage(), e); } } } void processDeviceAdded(Device device) { log.info("** DEVICE ADDED with ID {}", device.id()); // NOTE: Punt ARP/NDP even when the device is not configured. // Host learning without network config is required for CORD config generator. routingRulePopulator.populateIpPunts(device.id()); routingRulePopulator.populateArpNdpPunts(device.id()); if (deviceConfiguration == null || !deviceConfiguration.isConfigured(device.id())) { log.warn("Device configuration unavailable. Device {} will be " + "processed after configuration.", device.id()); return; } processDeviceAddedInternal(device.id()); } private void processDeviceAddedInternal(DeviceId deviceId) { // Irrespective of whether the local is a MASTER or not for this device, // we need to create a SR-group-handler instance. This is because in a // multi-instance setup, any instance can initiate forwarding/next-objectives // for any switch (even if this instance is a SLAVE or not even connected // to the switch). To handle this, a default-group-handler instance is necessary // per switch. log.debug("Current groupHandlerMap devs: {}", groupHandlerMap.keySet()); if (groupHandlerMap.get(deviceId) == null) { DefaultGroupHandler groupHandler; try { groupHandler = DefaultGroupHandler. 
createGroupHandler(deviceId, appId, deviceConfiguration, linkService, flowObjectiveService, this); } catch (DeviceConfigNotFoundException e) { log.warn(e.getMessage() + " Aborting processDeviceAdded."); return; } log.debug("updating groupHandlerMap with new grpHdlr for device: {}", deviceId); groupHandlerMap.put(deviceId, groupHandler); } if (mastershipService.isLocalMaster(deviceId)) { defaultRoutingHandler.populatePortAddressingRules(deviceId); xConnectHandler.init(deviceId); DefaultGroupHandler groupHandler = groupHandlerMap.get(deviceId); groupHandler.createGroupsFromVlanConfig(); routingRulePopulator.populateSubnetBroadcastRule(deviceId); } appCfgHandler.init(deviceId); hostHandler.init(deviceId); routeHandler.init(deviceId); } private void processDeviceRemoved(Device device) { dsNextObjStore.entrySet().stream() .filter(entry -> entry.getKey().deviceId().equals(device.id())) .forEach(entry -> dsNextObjStore.remove(entry.getKey())); vlanNextObjStore.entrySet().stream() .filter(entry -> entry.getKey().deviceId().equals(device.id())) .forEach(entry -> vlanNextObjStore.remove(entry.getKey())); portNextObjStore.entrySet().stream() .filter(entry -> entry.getKey().deviceId().equals(device.id())) .forEach(entry -> portNextObjStore.remove(entry.getKey())); linkHandler.processDeviceRemoved(device); DefaultGroupHandler gh = groupHandlerMap.remove(device.id()); if (gh != null) { gh.shutdown(); } // Note that a switch going down is associated with all of its links // going down as well, but it is treated as a single switch down event // while the link-downs are ignored. We cannot rely on the ordering of // events - i.e we cannot expect all link-downs to come before the // switch down - so we purge all seen-links for the switch before // handling route-path changes for the switch-down defaultRoutingHandler .populateRoutingRulesForLinkStatusChange(null, null, device.id(), true); defaultRoutingHandler.purgeEcmpGraph(device.id()); xConnectHandler.removeDevice(device.id()); // Cleanup all internal groupHandler stores for this device. Should be // done after all rerouting or rehashing has been completed groupHandlerMap.entrySet() .forEach(entry -> entry.getValue().cleanUpForNeighborDown(device.id())); } private void processPortUpdated(Device device, Port port) { if (deviceConfiguration == null || !deviceConfiguration.isConfigured(device.id())) { log.warn("Device configuration uploading. Not handling port event for" + "dev: {} port: {}", device.id(), port.number()); return; } if (!mastershipService.isLocalMaster(device.id())) { log.debug("Not master for dev:{} .. not handling port updated event" + "for port {}", device.id(), port.number()); return; } // first we handle filtering rules associated with the port if (port.isEnabled()) { log.info("Switchport {}/{} enabled..programming filters", device.id(), port.number()); routingRulePopulator.processSinglePortFilters(device.id(), port.number(), true); } else { log.info("Switchport {}/{} disabled..removing filters", device.id(), port.number()); routingRulePopulator.processSinglePortFilters(device.id(), port.number(), false); } // portUpdated calls are for ports that have gone down or up. For switch // to switch ports, link-events should take care of any re-routing or // group editing necessary for port up/down. Here we only process edge ports // that are already configured. 
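// A port counts as an edge port only if its interface has vlan-untagged, vlan-native or vlan-tagged configured; otherwise the event is ignored below.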
ConnectPoint cp = new ConnectPoint(device.id(), port.number()); VlanId untaggedVlan = getUntaggedVlanId(cp); VlanId nativeVlan = getNativeVlanId(cp); Set<VlanId> taggedVlans = getTaggedVlanId(cp); if (untaggedVlan == null && nativeVlan == null && taggedVlans.isEmpty()) { log.debug("Not handling port updated event for non-edge port (unconfigured) " + "dev/port: {}/{}", device.id(), port.number()); return; } if (untaggedVlan != null) { processEdgePort(device, port, untaggedVlan, true); } if (nativeVlan != null) { processEdgePort(device, port, nativeVlan, true); } if (!taggedVlans.isEmpty()) { taggedVlans.forEach(tag -> processEdgePort(device, port, tag, false)); } } private void processEdgePort(Device device, Port port, VlanId vlanId, boolean popVlan) { boolean portUp = port.isEnabled(); if (portUp) { log.info("Device:EdgePort {}:{} is enabled in vlan: {}", device.id(), port.number(), vlanId); hostHandler.processPortUp(new ConnectPoint(device.id(), port.number())); } else { log.info("Device:EdgePort {}:{} is disabled in vlan: {}", device.id(), port.number(), vlanId); } DefaultGroupHandler groupHandler = groupHandlerMap.get(device.id()); if (groupHandler != null) { groupHandler.processEdgePort(port.number(), vlanId, popVlan, portUp); } else { log.warn("Group handler not found for dev:{}. Not handling edge port" + " {} event for port:{}", device.id(), (portUp) ? "UP" : "DOWN", port.number()); } } private void createOrUpdateDeviceConfiguration() { if (deviceConfiguration == null) { log.info("Creating new DeviceConfiguration"); deviceConfiguration = new DeviceConfiguration(this); } else { log.info("Updating DeviceConfiguration"); deviceConfiguration.updateConfig(); } } /** * Registers the given connect point with the NRS, this is necessary * to receive the NDP and ARP packets from the NRS. * * @param portToRegister connect point to register */ public void registerConnectPoint(ConnectPoint portToRegister) { neighbourResolutionService.registerNeighbourHandler( portToRegister, neighbourHandler, appId ); } private class InternalConfigListener implements NetworkConfigListener { private static final long PROGRAM_DELAY = 2; SegmentRoutingManager srManager; /** * Constructs the internal network config listener. * * @param srManager segment routing manager */ InternalConfigListener(SegmentRoutingManager srManager) { this.srManager = srManager; } /** * Reads network config and initializes related data structure accordingly. 
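* Handler instances are re-created on each invocation, and the actual programming is deferred briefly so that bursts of config notifications are coalesced.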
*/ void configureNetwork() { log.info("Configuring network ..."); createOrUpdateDeviceConfiguration(); arpHandler = new ArpHandler(srManager); icmpHandler = new IcmpHandler(srManager); ipHandler = new IpHandler(srManager); routingRulePopulator = new RoutingRulePopulator(srManager); defaultRoutingHandler = new DefaultRoutingHandler(srManager); tunnelHandler = new TunnelHandler(linkService, deviceConfiguration, groupHandlerMap, tunnelStore); policyHandler = new PolicyHandler(appId, deviceConfiguration, flowObjectiveService, tunnelHandler, policyStore); // add a small delay to absorb multiple network config added notifications if (!programmingScheduled.get()) { log.info("Buffering config calls for {} secs", PROGRAM_DELAY); programmingScheduled.set(true); mainEventExecutor.schedule(new ConfigChange(), PROGRAM_DELAY, TimeUnit.SECONDS); } mcastHandler.init(); } @Override public void event(NetworkConfigEvent event) { checkState(appCfgHandler != null, "NetworkConfigEventHandler is not initialized"); checkState(xConnectHandler != null, "XConnectHandler is not initialized"); switch (event.type()) { case CONFIG_ADDED: case CONFIG_UPDATED: case CONFIG_REMOVED: log.trace("Schedule Network Config event {}", event); mainEventExecutor.execute(new InternalEventHandler(event)); break; default: break; } } @Override public boolean isRelevant(NetworkConfigEvent event) { if (event.type() == CONFIG_REGISTERED || event.type() == CONFIG_UNREGISTERED) { log.debug("Ignore event {} due to type mismatch", event); return false; } if (!event.configClass().equals(SegmentRoutingDeviceConfig.class) && !event.configClass().equals(SegmentRoutingAppConfig.class) && !event.configClass().equals(InterfaceConfig.class) && !event.configClass().equals(XConnectConfig.class)) { log.debug("Ignore event {} due to class mismatch", event); return false; } return true; } private final class ConfigChange implements Runnable { @Override public void run() { programmingScheduled.set(false); log.info("Reacting to config changes after buffer delay"); for (Device device : deviceService.getDevices()) { processDeviceAdded(device); } defaultRoutingHandler.startPopulationProcess(); } } } private class InternalLinkListener implements LinkListener { @Override public void event(LinkEvent event) { if (event.type() == LinkEvent.Type.LINK_ADDED || event.type() == LinkEvent.Type.LINK_UPDATED || event.type() == LinkEvent.Type.LINK_REMOVED) { log.trace("Schedule Link event {}", event); mainEventExecutor.execute(new InternalEventHandler(event)); } } } private class InternalDeviceListener implements DeviceListener { @Override public void event(DeviceEvent event) { switch (event.type()) { case DEVICE_ADDED: case PORT_UPDATED: case PORT_ADDED: case DEVICE_UPDATED: case DEVICE_AVAILABILITY_CHANGED: log.trace("Schedule Device event {}", event); mainEventExecutor.execute(new InternalEventHandler(event)); break; default: } } } private class InternalTopologyListener implements TopologyListener { @Override public void event(TopologyEvent event) { switch (event.type()) { case TOPOLOGY_CHANGED: log.trace("Schedule Topology event {}", event); mainEventExecutor.execute(new InternalEventHandler(event)); break; default: } } } private class InternalHostListener implements HostListener { @Override public void event(HostEvent event) { switch (event.type()) { case HOST_ADDED: case HOST_MOVED: case HOST_REMOVED: case HOST_UPDATED: log.trace("Schedule Host event {}", event); hostEventExecutor.execute(new InternalEventHandler(event)); break; default: log.warn("Unsupported host 
event type: {}", event.type()); break; } } } private class InternalMcastListener implements McastListener { @Override public void event(McastEvent event) { switch (event.type()) { case SOURCES_ADDED: case SOURCES_REMOVED: case SINKS_ADDED: case SINKS_REMOVED: case ROUTE_REMOVED: log.trace("Schedule Mcast event {}", event); mcastEventExecutor.execute(new InternalEventHandler(event)); break; case ROUTE_ADDED: default: log.warn("Unsupported mcast event type: {}", event.type()); break; } } } private class InternalRouteEventListener implements RouteListener { @Override public void event(RouteEvent event) { switch (event.type()) { case ROUTE_ADDED: case ROUTE_UPDATED: case ROUTE_REMOVED: case ALTERNATIVE_ROUTES_CHANGED: log.trace("Schedule Route event {}", event); routeEventExecutor.execute(new InternalEventHandler(event)); break; default: log.warn("Unsupported route event type: {}", event.type()); break; } } } private void updateInterface(InterfaceConfig conf, InterfaceConfig prevConf) { try { Set<Interface> intfs = conf.getInterfaces(); Set<Interface> prevIntfs = prevConf.getInterfaces(); // Now we only handle one interface config at each port. if (intfs.size() != 1 || prevIntfs.size() != 1) { log.warn("Interface update aborted - one at a time is allowed, " + "but {} / {}(prev) received.", intfs.size(), prevIntfs.size()); return; } Interface intf = intfs.stream().findFirst().get(); Interface prevIntf = prevIntfs.stream().findFirst().get(); DeviceId deviceId = intf.connectPoint().deviceId(); PortNumber portNum = intf.connectPoint().port(); removeSubnetConfig(prevIntf.connectPoint(), Sets.difference(new HashSet<>(prevIntf.ipAddressesList()), new HashSet<>(intf.ipAddressesList()))); if (!prevIntf.vlanNative().equals(VlanId.NONE) && !prevIntf.vlanNative().equals(intf.vlanUntagged()) && !prevIntf.vlanNative().equals(intf.vlanNative())) { if (intf.vlanTagged().contains(prevIntf.vlanNative())) { // Update filtering objective and L2IG group bucket updatePortVlanTreatment(deviceId, portNum, prevIntf.vlanNative(), false); } else { // RemoveVlanNative updateVlanConfigInternal(deviceId, portNum, prevIntf.vlanNative(), true, false); } } if (!prevIntf.vlanUntagged().equals(VlanId.NONE) && !prevIntf.vlanUntagged().equals(intf.vlanUntagged()) && !prevIntf.vlanUntagged().equals(intf.vlanNative())) { if (intf.vlanTagged().contains(prevIntf.vlanUntagged())) { // Update filtering objective and L2IG group bucket updatePortVlanTreatment(deviceId, portNum, prevIntf.vlanUntagged(), false); } else { // RemoveVlanUntagged updateVlanConfigInternal(deviceId, portNum, prevIntf.vlanUntagged(), true, false); } } if (!prevIntf.vlanTagged().isEmpty() && !intf.vlanTagged().equals(prevIntf.vlanTagged())) { // RemoveVlanTagged Sets.difference(prevIntf.vlanTagged(), intf.vlanTagged()).stream() .filter(i -> !intf.vlanUntagged().equals(i)) .filter(i -> !intf.vlanNative().equals(i)) .forEach(vlanId -> updateVlanConfigInternal( deviceId, portNum, vlanId, false, false)); } if (!intf.vlanNative().equals(VlanId.NONE) && !prevIntf.vlanNative().equals(intf.vlanNative()) && !prevIntf.vlanUntagged().equals(intf.vlanNative())) { if (prevIntf.vlanTagged().contains(intf.vlanNative())) { // Update filtering objective and L2IG group bucket updatePortVlanTreatment(deviceId, portNum, intf.vlanNative(), true); } else { // AddVlanNative updateVlanConfigInternal(deviceId, portNum, intf.vlanNative(), true, true); } } if (!intf.vlanTagged().isEmpty() && !intf.vlanTagged().equals(prevIntf.vlanTagged())) { // AddVlanTagged 
Sets.difference(intf.vlanTagged(), prevIntf.vlanTagged()).stream() .filter(i -> !prevIntf.vlanUntagged().equals(i)) .filter(i -> !prevIntf.vlanNative().equals(i)) .forEach(vlanId -> updateVlanConfigInternal( deviceId, portNum, vlanId, false, true) ); } if (!intf.vlanUntagged().equals(VlanId.NONE) && !prevIntf.vlanUntagged().equals(intf.vlanUntagged()) && !prevIntf.vlanNative().equals(intf.vlanUntagged())) { if (prevIntf.vlanTagged().contains(intf.vlanUntagged())) { // Update filtering objective and L2IG group bucket updatePortVlanTreatment(deviceId, portNum, intf.vlanUntagged(), true); } else { // AddVlanUntagged updateVlanConfigInternal(deviceId, portNum, intf.vlanUntagged(), true, true); } } addSubnetConfig(prevIntf.connectPoint(), Sets.difference(new HashSet<>(intf.ipAddressesList()), new HashSet<>(prevIntf.ipAddressesList()))); } catch (ConfigException e) { log.error("Error in configuration"); } } private void updatePortVlanTreatment(DeviceId deviceId, PortNumber portNum, VlanId vlanId, boolean pushVlan) { DefaultGroupHandler grpHandler = getGroupHandler(deviceId); if (grpHandler == null) { log.warn("Failed to retrieve group handler for device {}", deviceId); return; } // Update filtering objective for a single port routingRulePopulator.updateSinglePortFilters(deviceId, portNum, !pushVlan, vlanId, false); routingRulePopulator.updateSinglePortFilters(deviceId, portNum, pushVlan, vlanId, true); if (getVlanNextObjectiveId(deviceId, vlanId) != -1) { // Update L2IG bucket of the port grpHandler.updateL2InterfaceGroupBucket(portNum, vlanId, pushVlan); } else { log.warn("Failed to retrieve next objective for vlan {} in device {}:{}", vlanId, deviceId, portNum); } } private void updateVlanConfigInternal(DeviceId deviceId, PortNumber portNum, VlanId vlanId, boolean pushVlan, boolean install) { DefaultGroupHandler grpHandler = getGroupHandler(deviceId); if (grpHandler == null) { log.warn("Failed to retrieve group handler for device {}", deviceId); return; } // Update filtering objective for a single port routingRulePopulator.updateSinglePortFilters(deviceId, portNum, pushVlan, vlanId, install); // Update filtering objective for multicast ingress port mcastHandler.updateFilterToDevice(deviceId, portNum, vlanId, install); int nextId = getVlanNextObjectiveId(deviceId, vlanId); if (nextId != -1 && !install) { // Update next objective for a single port as an output port // Remove a single port from L2FG grpHandler.updateGroupFromVlanConfiguration(vlanId, portNum, nextId, install); // Remove L2 Bridging rule and L3 Unicast rule to the host hostHandler.processIntfVlanUpdatedEvent(deviceId, portNum, vlanId, pushVlan, install); // Remove broadcast forwarding rule and corresponding L2FG for VLAN // only if there is no port configured on that VLAN ID if (!getVlanPortMap(deviceId).containsKey(vlanId)) { // Remove broadcast forwarding rule for the VLAN routingRulePopulator.updateSubnetBroadcastRule(deviceId, vlanId, install); // Remove L2FG for VLAN grpHandler.removeBcastGroupFromVlan(deviceId, portNum, vlanId, pushVlan); } else { // Remove L2IG of the port grpHandler.removePortNextObjective(deviceId, portNum, vlanId, pushVlan); } } else if (install) { if (nextId != -1) { // Add a single port to L2FG grpHandler.updateGroupFromVlanConfiguration(vlanId, portNum, nextId, install); } else { // Create L2FG for VLAN grpHandler.createBcastGroupFromVlan(vlanId, Collections.singleton(portNum)); routingRulePopulator.updateSubnetBroadcastRule(deviceId, vlanId, install); } 
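// Add L2 bridging rule and L3 unicast rule for the hosts on this port/VLAN.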
hostHandler.processIntfVlanUpdatedEvent(deviceId, portNum, vlanId, pushVlan, install); } else { log.warn("Failed to retrieve next objective for vlan {} in device {}:{}", vlanId, deviceId, portNum); } } private void removeSubnetConfig(ConnectPoint cp, Set<InterfaceIpAddress> ipAddressSet) { Set<IpPrefix> ipPrefixSet = ipAddressSet.stream(). map(InterfaceIpAddress::subnetAddress).collect(Collectors.toSet()); Set<InterfaceIpAddress> deviceIntfIpAddrs = interfaceService.getInterfaces().stream() .filter(intf -> intf.connectPoint().deviceId().equals(cp.deviceId())) .filter(intf -> !intf.connectPoint().equals(cp)) .flatMap(intf -> intf.ipAddressesList().stream()) .collect(Collectors.toSet()); // 1. Partial subnet population // Remove routing rules for removed subnet from previous configuration, // which does not also exist in other interfaces in the same device Set<IpPrefix> deviceIpPrefixSet = deviceIntfIpAddrs.stream() .map(InterfaceIpAddress::subnetAddress) .collect(Collectors.toSet()); defaultRoutingHandler.revokeSubnet( ipPrefixSet.stream() .filter(ipPrefix -> !deviceIpPrefixSet.contains(ipPrefix)) .collect(Collectors.toSet())); // 2. Interface IP punts // Remove IP punts for old Intf address Set<IpAddress> deviceIpAddrs = deviceIntfIpAddrs.stream() .map(InterfaceIpAddress::ipAddress) .collect(Collectors.toSet()); ipAddressSet.stream() .map(InterfaceIpAddress::ipAddress) .filter(interfaceIpAddress -> !deviceIpAddrs.contains(interfaceIpAddress)) .forEach(interfaceIpAddress -> routingRulePopulator.revokeSingleIpPunts( cp.deviceId(), interfaceIpAddress)); // 3. Host unicast routing rule // Remove unicast routing rule hostHandler.processIntfIpUpdatedEvent(cp, ipPrefixSet, false); } private void addSubnetConfig(ConnectPoint cp, Set<InterfaceIpAddress> ipAddressSet) { Set<IpPrefix> ipPrefixSet = ipAddressSet.stream(). map(InterfaceIpAddress::subnetAddress).collect(Collectors.toSet()); Set<InterfaceIpAddress> deviceIntfIpAddrs = interfaceService.getInterfaces().stream() .filter(intf -> intf.connectPoint().deviceId().equals(cp.deviceId())) .filter(intf -> !intf.connectPoint().equals(cp)) .flatMap(intf -> intf.ipAddressesList().stream()) .collect(Collectors.toSet()); // 1. Partial subnet population // Add routing rules for newly added subnet, which does not also exist in // other interfaces in the same device Set<IpPrefix> deviceIpPrefixSet = deviceIntfIpAddrs.stream() .map(InterfaceIpAddress::subnetAddress) .collect(Collectors.toSet()); defaultRoutingHandler.populateSubnet( Collections.singleton(cp), ipPrefixSet.stream() .filter(ipPrefix -> !deviceIpPrefixSet.contains(ipPrefix)) .collect(Collectors.toSet())); // 2. Interface IP punts // Add IP punts for new Intf address Set<IpAddress> deviceIpAddrs = deviceIntfIpAddrs.stream() .map(InterfaceIpAddress::ipAddress) .collect(Collectors.toSet()); ipAddressSet.stream() .map(InterfaceIpAddress::ipAddress) .filter(interfaceIpAddress -> !deviceIpAddrs.contains(interfaceIpAddress)) .forEach(interfaceIpAddress -> routingRulePopulator.populateSingleIpPunts( cp.deviceId(), interfaceIpAddress)); // 3. Host unicast routing rule // Add unicast routing rule hostHandler.processIntfIpUpdatedEvent(cp, ipPrefixSet, true); } }
apps/segmentrouting/app/src/main/java/org/onosproject/segmentrouting/SegmentRoutingManager.java
/* * Copyright 2015-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.segmentrouting; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Sets; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Modified; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.onlab.packet.Ethernet; import org.onlab.packet.ICMP6; import org.onlab.packet.IPv4; import org.onlab.packet.IPv6; import org.onlab.packet.IpAddress; import org.onlab.packet.IpPrefix; import org.onlab.packet.VlanId; import org.onlab.util.KryoNamespace; import org.onlab.util.Tools; import org.onosproject.cfg.ComponentConfigService; import org.onosproject.cluster.ClusterService; import org.onosproject.cluster.LeadershipService; import org.onosproject.core.ApplicationId; import org.onosproject.core.CoreService; import org.onosproject.event.Event; import org.onosproject.mastership.MastershipService; import org.onosproject.mcast.api.McastEvent; import org.onosproject.mcast.api.McastListener; import org.onosproject.mcast.api.MulticastRouteService; import org.onosproject.net.ConnectPoint; import org.onosproject.net.Device; import org.onosproject.net.DeviceId; import org.onosproject.net.Host; import org.onosproject.net.HostId; import org.onosproject.net.Link; import org.onosproject.net.Port; import org.onosproject.net.PortNumber; import org.onosproject.net.config.ConfigException; import org.onosproject.net.config.ConfigFactory; import org.onosproject.net.config.NetworkConfigEvent; import org.onosproject.net.config.NetworkConfigListener; import org.onosproject.net.config.NetworkConfigRegistry; import org.onosproject.net.config.basics.InterfaceConfig; import org.onosproject.net.config.basics.McastConfig; import org.onosproject.net.config.basics.SubjectFactories; import org.onosproject.net.device.DeviceAdminService; import org.onosproject.net.device.DeviceEvent; import org.onosproject.net.device.DeviceListener; import org.onosproject.net.device.DeviceService; import org.onosproject.net.flow.TrafficSelector; import org.onosproject.net.flow.TrafficTreatment; import org.onosproject.net.flowobjective.FlowObjectiveService; import org.onosproject.net.host.HostEvent; import org.onosproject.net.host.HostListener; import org.onosproject.net.host.HostLocationProbingService; import org.onosproject.net.host.HostService; import org.onosproject.net.host.InterfaceIpAddress; import org.onosproject.net.intf.Interface; import org.onosproject.net.intf.InterfaceService; import org.onosproject.net.link.LinkEvent; import org.onosproject.net.link.LinkListener; import 
org.onosproject.net.link.LinkService; import org.onosproject.net.neighbour.NeighbourResolutionService; import org.onosproject.net.packet.InboundPacket; import org.onosproject.net.packet.PacketContext; import org.onosproject.net.packet.PacketProcessor; import org.onosproject.net.packet.PacketService; import org.onosproject.net.topology.TopologyEvent; import org.onosproject.net.topology.TopologyListener; import org.onosproject.net.topology.TopologyService; import org.onosproject.routeservice.ResolvedRoute; import org.onosproject.routeservice.RouteEvent; import org.onosproject.routeservice.RouteListener; import org.onosproject.routeservice.RouteService; import org.onosproject.segmentrouting.config.DeviceConfigNotFoundException; import org.onosproject.segmentrouting.config.DeviceConfiguration; import org.onosproject.segmentrouting.config.SegmentRoutingAppConfig; import org.onosproject.segmentrouting.config.SegmentRoutingDeviceConfig; import org.onosproject.segmentrouting.config.XConnectConfig; import org.onosproject.segmentrouting.grouphandler.DefaultGroupHandler; import org.onosproject.segmentrouting.grouphandler.DestinationSet; import org.onosproject.segmentrouting.grouphandler.NextNeighbors; import org.onosproject.segmentrouting.mcast.McastHandler; import org.onosproject.segmentrouting.mcast.McastRole; import org.onosproject.segmentrouting.pwaas.DefaultL2Tunnel; import org.onosproject.segmentrouting.pwaas.DefaultL2TunnelDescription; import org.onosproject.segmentrouting.pwaas.DefaultL2TunnelHandler; import org.onosproject.segmentrouting.pwaas.DefaultL2TunnelPolicy; import org.onosproject.segmentrouting.pwaas.L2Tunnel; import org.onosproject.segmentrouting.pwaas.L2TunnelHandler; import org.onosproject.segmentrouting.pwaas.L2TunnelPolicy; import org.onosproject.segmentrouting.pwaas.L2TunnelDescription; import org.onosproject.segmentrouting.storekey.DestinationSetNextObjectiveStoreKey; import org.onosproject.segmentrouting.storekey.McastStoreKey; import org.onosproject.segmentrouting.storekey.PortNextObjectiveStoreKey; import org.onosproject.segmentrouting.storekey.VlanNextObjectiveStoreKey; import org.onosproject.segmentrouting.storekey.XConnectStoreKey; import org.onosproject.store.serializers.KryoNamespaces; import org.onosproject.store.service.EventuallyConsistentMap; import org.onosproject.store.service.EventuallyConsistentMapBuilder; import org.onosproject.store.service.StorageService; import org.onosproject.store.service.WallClockTimestamp; import org.osgi.service.component.ComponentContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Collections; import java.util.Dictionary; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import static com.google.common.base.Preconditions.checkState; import static org.onlab.packet.Ethernet.TYPE_ARP; import static org.onlab.util.Tools.groupedThreads; import static org.onosproject.net.config.NetworkConfigEvent.Type.CONFIG_REGISTERED; import static org.onosproject.net.config.NetworkConfigEvent.Type.CONFIG_UNREGISTERED; /** * Segment routing manager. 
*/ @Service @Component(immediate = true) public class SegmentRoutingManager implements SegmentRoutingService { private static Logger log = LoggerFactory.getLogger(SegmentRoutingManager.class); private static final String NOT_MASTER = "Current instance is not the master of {}. Ignore."; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) private ComponentConfigService compCfgService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) private NeighbourResolutionService neighbourResolutionService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public CoreService coreService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) PacketService packetService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) HostService hostService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) HostLocationProbingService probingService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public DeviceService deviceService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) DeviceAdminService deviceAdminService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public FlowObjectiveService flowObjectiveService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public LinkService linkService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public MastershipService mastershipService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public StorageService storageService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public MulticastRouteService multicastRouteService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public TopologyService topologyService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) RouteService routeService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public NetworkConfigRegistry cfgService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public InterfaceService interfaceService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public ClusterService clusterService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) public LeadershipService leadershipService; @Property(name = "activeProbing", boolValue = true, label = "Enable active probing to discover dual-homed hosts.") boolean activeProbing = true; ArpHandler arpHandler = null; IcmpHandler icmpHandler = null; IpHandler ipHandler = null; RoutingRulePopulator routingRulePopulator = null; ApplicationId appId; DeviceConfiguration deviceConfiguration = null; DefaultRoutingHandler defaultRoutingHandler = null; private TunnelHandler tunnelHandler = null; private PolicyHandler policyHandler = null; private InternalPacketProcessor processor = null; private InternalLinkListener linkListener = null; private InternalDeviceListener deviceListener = null; private AppConfigHandler appCfgHandler = null; public XConnectHandler xConnectHandler = null; McastHandler mcastHandler = null; HostHandler hostHandler = null; private RouteHandler routeHandler = null; LinkHandler linkHandler = null; private SegmentRoutingNeighbourDispatcher neighbourHandler = null; private DefaultL2TunnelHandler l2TunnelHandler = null; private TopologyHandler topologyHandler = null; private final InternalHostListener hostListener = new InternalHostListener(); private final InternalConfigListener cfgListener = new InternalConfigListener(this); private final InternalMcastListener mcastListener = new InternalMcastListener(); private 
final InternalRouteEventListener routeListener = new InternalRouteEventListener(); private final InternalTopologyListener topologyListener = new InternalTopologyListener(); // Handles device, link, topology and network config events private ScheduledExecutorService mainEventExecutor = Executors .newScheduledThreadPool(1, groupedThreads("sr-event-main", "%d", log)); // Handles host, route, mcast events private ScheduledExecutorService hostEventExecutor = Executors .newScheduledThreadPool(1, groupedThreads("sr-event-host", "%d", log)); private ScheduledExecutorService routeEventExecutor = Executors .newScheduledThreadPool(1, groupedThreads("sr-event-route", "%d", log)); private ScheduledExecutorService mcastEventExecutor = Executors .newScheduledThreadPool(1, groupedThreads("sr-event-mcast", "%d", log)); Map<DeviceId, DefaultGroupHandler> groupHandlerMap = new ConcurrentHashMap<>(); /** * Per device next objective ID store with (device id + destination set) as key. * Used to keep track on MPLS group information. */ private EventuallyConsistentMap<DestinationSetNextObjectiveStoreKey, NextNeighbors> dsNextObjStore = null; /** * Per device next objective ID store with (device id + vlanid) as key. * Used to keep track on L2 flood group information. */ private EventuallyConsistentMap<VlanNextObjectiveStoreKey, Integer> vlanNextObjStore = null; /** * Per device next objective ID store with (device id + port + treatment + meta) as key. * Used to keep track on L2 interface group and L3 unicast group information. */ private EventuallyConsistentMap<PortNextObjectiveStoreKey, Integer> portNextObjStore = null; private EventuallyConsistentMap<String, Tunnel> tunnelStore = null; private EventuallyConsistentMap<String, Policy> policyStore = null; private AtomicBoolean programmingScheduled = new AtomicBoolean(); private final ConfigFactory<DeviceId, SegmentRoutingDeviceConfig> deviceConfigFactory = new ConfigFactory<DeviceId, SegmentRoutingDeviceConfig>( SubjectFactories.DEVICE_SUBJECT_FACTORY, SegmentRoutingDeviceConfig.class, "segmentrouting") { @Override public SegmentRoutingDeviceConfig createConfig() { return new SegmentRoutingDeviceConfig(); } }; private final ConfigFactory<ApplicationId, SegmentRoutingAppConfig> appConfigFactory = new ConfigFactory<ApplicationId, SegmentRoutingAppConfig>( SubjectFactories.APP_SUBJECT_FACTORY, SegmentRoutingAppConfig.class, "segmentrouting") { @Override public SegmentRoutingAppConfig createConfig() { return new SegmentRoutingAppConfig(); } }; private final ConfigFactory<ApplicationId, XConnectConfig> xConnectConfigFactory = new ConfigFactory<ApplicationId, XConnectConfig>( SubjectFactories.APP_SUBJECT_FACTORY, XConnectConfig.class, "xconnect") { @Override public XConnectConfig createConfig() { return new XConnectConfig(); } }; private ConfigFactory<ApplicationId, McastConfig> mcastConfigFactory = new ConfigFactory<ApplicationId, McastConfig>( SubjectFactories.APP_SUBJECT_FACTORY, McastConfig.class, "multicast") { @Override public McastConfig createConfig() { return new McastConfig(); } }; private static final Object THREAD_SCHED_LOCK = new Object(); private static int numOfEventsQueued = 0; private static int numOfEventsExecuted = 0; private static int numOfHandlerExecution = 0; private static int numOfHandlerScheduled = 0; /** * Segment Routing App ID. */ public static final String APP_NAME = "org.onosproject.segmentrouting"; /** * The default VLAN ID assigned to the interfaces without subnet config. 
*/ public static final VlanId INTERNAL_VLAN = VlanId.vlanId((short) 4094); @Activate protected void activate(ComponentContext context) { appId = coreService.registerApplication(APP_NAME); log.debug("Creating EC map nsnextobjectivestore"); EventuallyConsistentMapBuilder<DestinationSetNextObjectiveStoreKey, NextNeighbors> nsNextObjMapBuilder = storageService.eventuallyConsistentMapBuilder(); dsNextObjStore = nsNextObjMapBuilder .withName("nsnextobjectivestore") .withSerializer(createSerializer()) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); log.trace("Current size {}", dsNextObjStore.size()); log.debug("Creating EC map vlannextobjectivestore"); EventuallyConsistentMapBuilder<VlanNextObjectiveStoreKey, Integer> vlanNextObjMapBuilder = storageService.eventuallyConsistentMapBuilder(); vlanNextObjStore = vlanNextObjMapBuilder .withName("vlannextobjectivestore") .withSerializer(createSerializer()) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); log.debug("Creating EC map subnetnextobjectivestore"); EventuallyConsistentMapBuilder<PortNextObjectiveStoreKey, Integer> portNextObjMapBuilder = storageService.eventuallyConsistentMapBuilder(); portNextObjStore = portNextObjMapBuilder .withName("portnextobjectivestore") .withSerializer(createSerializer()) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); EventuallyConsistentMapBuilder<String, Tunnel> tunnelMapBuilder = storageService.eventuallyConsistentMapBuilder(); tunnelStore = tunnelMapBuilder .withName("tunnelstore") .withSerializer(createSerializer()) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); EventuallyConsistentMapBuilder<String, Policy> policyMapBuilder = storageService.eventuallyConsistentMapBuilder(); policyStore = policyMapBuilder .withName("policystore") .withSerializer(createSerializer()) .withTimestampProvider((k, v) -> new WallClockTimestamp()) .build(); compCfgService.preSetProperty("org.onosproject.net.group.impl.GroupManager", "purgeOnDisconnection", "true"); compCfgService.preSetProperty("org.onosproject.net.flow.impl.FlowRuleManager", "purgeOnDisconnection", "true"); compCfgService.preSetProperty("org.onosproject.provider.host.impl.HostLocationProvider", "requestInterceptsEnabled", "false"); compCfgService.preSetProperty("org.onosproject.net.neighbour.impl.NeighbourResolutionManager", "requestInterceptsEnabled", "false"); compCfgService.preSetProperty("org.onosproject.dhcprelay.DhcpRelayManager", "arpEnabled", "false"); compCfgService.preSetProperty("org.onosproject.net.host.impl.HostManager", "greedyLearningIpv6", "true"); compCfgService.preSetProperty("org.onosproject.routing.cpr.ControlPlaneRedirectManager", "forceUnprovision", "true"); compCfgService.preSetProperty("org.onosproject.routeservice.store.RouteStoreImpl", "distributed", "true"); compCfgService.preSetProperty("org.onosproject.provider.host.impl.HostLocationProvider", "multihomingEnabled", "true"); compCfgService.preSetProperty("org.onosproject.provider.lldp.impl.LldpLinkProvider", "staleLinkAge", "15000"); compCfgService.preSetProperty("org.onosproject.net.host.impl.HostManager", "allowDuplicateIps", "false"); compCfgService.registerProperties(getClass()); modified(context); processor = new InternalPacketProcessor(); linkListener = new InternalLinkListener(); deviceListener = new InternalDeviceListener(); appCfgHandler = new AppConfigHandler(this); xConnectHandler = new XConnectHandler(this); mcastHandler = new McastHandler(this); hostHandler = new HostHandler(this); linkHandler = 
new LinkHandler(this); routeHandler = new RouteHandler(this); neighbourHandler = new SegmentRoutingNeighbourDispatcher(this); l2TunnelHandler = new DefaultL2TunnelHandler(this); topologyHandler = new TopologyHandler(this); cfgService.addListener(cfgListener); cfgService.registerConfigFactory(deviceConfigFactory); cfgService.registerConfigFactory(appConfigFactory); cfgService.registerConfigFactory(xConnectConfigFactory); cfgService.registerConfigFactory(mcastConfigFactory); log.info("Configuring network before adding listeners"); cfgListener.configureNetwork(); hostService.addListener(hostListener); packetService.addProcessor(processor, PacketProcessor.director(2)); linkService.addListener(linkListener); deviceService.addListener(deviceListener); multicastRouteService.addListener(mcastListener); routeService.addListener(routeListener); topologyService.addListener(topologyListener); linkHandler.init(); l2TunnelHandler.init(); log.info("Started"); } KryoNamespace.Builder createSerializer() { return new KryoNamespace.Builder() .register(KryoNamespaces.API) .register(DestinationSetNextObjectiveStoreKey.class, VlanNextObjectiveStoreKey.class, DestinationSet.class, NextNeighbors.class, Tunnel.class, DefaultTunnel.class, Policy.class, TunnelPolicy.class, Policy.Type.class, PortNextObjectiveStoreKey.class, XConnectStoreKey.class, L2Tunnel.class, L2TunnelPolicy.class, DefaultL2Tunnel.class, DefaultL2TunnelPolicy.class ); } @Deactivate protected void deactivate() { cfgService.removeListener(cfgListener); cfgService.unregisterConfigFactory(deviceConfigFactory); cfgService.unregisterConfigFactory(appConfigFactory); cfgService.unregisterConfigFactory(xConnectConfigFactory); cfgService.unregisterConfigFactory(mcastConfigFactory); compCfgService.unregisterProperties(getClass(), false); hostService.removeListener(hostListener); packetService.removeProcessor(processor); linkService.removeListener(linkListener); deviceService.removeListener(deviceListener); multicastRouteService.removeListener(mcastListener); routeService.removeListener(routeListener); topologyService.removeListener(topologyListener); neighbourResolutionService.unregisterNeighbourHandlers(appId); processor = null; linkListener = null; deviceListener = null; groupHandlerMap.clear(); dsNextObjStore.destroy(); vlanNextObjStore.destroy(); portNextObjStore.destroy(); tunnelStore.destroy(); policyStore.destroy(); mcastHandler.terminate(); log.info("Stopped"); } @Modified private void modified(ComponentContext context) { Dictionary<?, ?> properties = context.getProperties(); if (properties == null) { return; } String strActiveProving = Tools.get(properties, "activeProbing"); boolean expectActiveProbing = Boolean.parseBoolean(strActiveProving); if (expectActiveProbing != activeProbing) { activeProbing = expectActiveProbing; log.info("{} active probing", activeProbing ? 
"Enabling" : "Disabling"); } } @Override public List<Tunnel> getTunnels() { return tunnelHandler.getTunnels(); } @Override public TunnelHandler.Result createTunnel(Tunnel tunnel) { return tunnelHandler.createTunnel(tunnel); } @Override public TunnelHandler.Result removeTunnel(Tunnel tunnel) { for (Policy policy: policyHandler.getPolicies()) { if (policy.type() == Policy.Type.TUNNEL_FLOW) { TunnelPolicy tunnelPolicy = (TunnelPolicy) policy; if (tunnelPolicy.tunnelId().equals(tunnel.id())) { log.warn("Cannot remove the tunnel used by a policy"); return TunnelHandler.Result.TUNNEL_IN_USE; } } } return tunnelHandler.removeTunnel(tunnel); } @Override public PolicyHandler.Result removePolicy(Policy policy) { return policyHandler.removePolicy(policy); } @Override public PolicyHandler.Result createPolicy(Policy policy) { return policyHandler.createPolicy(policy); } @Override public List<Policy> getPolicies() { return policyHandler.getPolicies(); } @Override public Set<L2TunnelDescription> getL2TunnelDescriptions(boolean pending) { return l2TunnelHandler.getL2Descriptions(pending); } @Override public List<L2Tunnel> getL2Tunnels() { return l2TunnelHandler.getL2Tunnels(); } @Override public List<L2TunnelPolicy> getL2Policies() { return l2TunnelHandler.getL2Policies(); } @Override @Deprecated public L2TunnelHandler.Result addPseudowiresBulk(List<DefaultL2TunnelDescription> bulkPseudowires) { // get both added and pending pseudowires List<L2TunnelDescription> pseudowires = new ArrayList<>(); pseudowires.addAll(l2TunnelHandler.getL2Descriptions(false)); pseudowires.addAll(l2TunnelHandler.getL2Descriptions(true)); pseudowires.addAll(bulkPseudowires); Set<L2TunnelDescription> newPseudowires = new HashSet(bulkPseudowires); L2TunnelHandler.Result retRes = L2TunnelHandler.Result.SUCCESS; L2TunnelHandler.Result res; for (DefaultL2TunnelDescription pw : bulkPseudowires) { res = addPseudowire(pw); if (res != L2TunnelHandler.Result.SUCCESS) { log.error("Pseudowire with id {} can not be instantiated !", res); retRes = res; } } return retRes; } @Override public L2TunnelHandler.Result addPseudowire(L2TunnelDescription l2TunnelDescription) { return l2TunnelHandler.deployPseudowire(l2TunnelDescription); } @Override public L2TunnelHandler.Result removePseudowire(Integer pwId) { return l2TunnelHandler.tearDownPseudowire(pwId); } @Override public void rerouteNetwork() { cfgListener.configureNetwork(); } @Override public Map<DeviceId, Set<IpPrefix>> getDeviceSubnetMap() { Map<DeviceId, Set<IpPrefix>> deviceSubnetMap = Maps.newHashMap(); deviceConfiguration.getRouters().forEach(device -> deviceSubnetMap.put(device, deviceConfiguration.getSubnets(device))); return deviceSubnetMap; } @Override public ImmutableMap<DeviceId, EcmpShortestPathGraph> getCurrentEcmpSpg() { if (defaultRoutingHandler != null) { return defaultRoutingHandler.getCurrentEmcpSpgMap(); } else { return null; } } @Override public ImmutableMap<DestinationSetNextObjectiveStoreKey, NextNeighbors> getDestinationSet() { if (dsNextObjStore != null) { return ImmutableMap.copyOf(dsNextObjStore.entrySet()); } else { return ImmutableMap.of(); } } @Override public void verifyGroups(DeviceId id) { DefaultGroupHandler gh = groupHandlerMap.get(id); if (gh != null) { gh.triggerBucketCorrector(); } } @Override public ImmutableMap<Link, Boolean> getSeenLinks() { return linkHandler.getSeenLinks(); } @Override public ImmutableMap<DeviceId, Set<PortNumber>> getDownedPortState() { return linkHandler.getDownedPorts(); } @Override public Map<McastStoreKey, Integer> 
getMcastNextIds(IpAddress mcastIp) { return mcastHandler.getMcastNextIds(mcastIp); } @Override public Map<McastStoreKey, McastRole> getMcastRoles(IpAddress mcastIp) { return mcastHandler.getMcastRoles(mcastIp); } @Override public Map<ConnectPoint, List<ConnectPoint>> getMcastPaths(IpAddress mcastIp) { return mcastHandler.getMcastPaths(mcastIp); } /** * Extracts the application ID from the manager. * * @return application ID */ public ApplicationId appId() { return appId; } /** * Returns the device configuration. * * @return device configuration */ public DeviceConfiguration deviceConfiguration() { return deviceConfiguration; } /** * Per device next objective ID store with (device id + destination set) as key. * Used to keep track on MPLS group information. * * @return next objective ID store */ public EventuallyConsistentMap<DestinationSetNextObjectiveStoreKey, NextNeighbors> dsNextObjStore() { return dsNextObjStore; } /** * Per device next objective ID store with (device id + vlanid) as key. * Used to keep track on L2 flood group information. * * @return vlan next object store */ public EventuallyConsistentMap<VlanNextObjectiveStoreKey, Integer> vlanNextObjStore() { return vlanNextObjStore; } /** * Per device next objective ID store with (device id + port + treatment + meta) as key. * Used to keep track on L2 interface group and L3 unicast group information. * * @return port next object store. */ public EventuallyConsistentMap<PortNextObjectiveStoreKey, Integer> portNextObjStore() { return portNextObjStore; } /** * Returns the MPLS-ECMP configuration which indicates whether ECMP on * labeled packets should be programmed or not. * * @return MPLS-ECMP value */ public boolean getMplsEcmp() { SegmentRoutingAppConfig segmentRoutingAppConfig = cfgService .getConfig(this.appId, SegmentRoutingAppConfig.class); return segmentRoutingAppConfig != null && segmentRoutingAppConfig.mplsEcmp(); } /** * Returns the tunnel object with the tunnel ID. * * @param tunnelId Tunnel ID * @return Tunnel reference */ public Tunnel getTunnel(String tunnelId) { return tunnelHandler.getTunnel(tunnelId); } // TODO Consider moving these to InterfaceService /** * Returns untagged VLAN configured on given connect point. * <p> * Only returns the first match if there are multiple untagged VLAN configured * on the connect point. * * @param connectPoint connect point * @return untagged VLAN or null if not configured */ VlanId getUntaggedVlanId(ConnectPoint connectPoint) { return interfaceService.getInterfacesByPort(connectPoint).stream() .filter(intf -> !intf.vlanUntagged().equals(VlanId.NONE)) .map(Interface::vlanUntagged) .findFirst().orElse(null); } /** * Returns tagged VLAN configured on given connect point. * <p> * Returns all matches if there are multiple tagged VLAN configured * on the connect point. * * @param connectPoint connect point * @return tagged VLAN or empty set if not configured */ Set<VlanId> getTaggedVlanId(ConnectPoint connectPoint) { Set<Interface> interfaces = interfaceService.getInterfacesByPort(connectPoint); return interfaces.stream() .map(Interface::vlanTagged) .flatMap(Set::stream) .collect(Collectors.toSet()); } /** * Returns native VLAN configured on given connect point. * <p> * Only returns the first match if there are multiple native VLAN configured * on the connect point. 
* * @param connectPoint connect point * @return native VLAN or null if not configured */ VlanId getNativeVlanId(ConnectPoint connectPoint) { Set<Interface> interfaces = interfaceService.getInterfacesByPort(connectPoint); return interfaces.stream() .filter(intf -> !intf.vlanNative().equals(VlanId.NONE)) .map(Interface::vlanNative) .findFirst() .orElse(null); } /** * Returns internal VLAN for untagged hosts on given connect point. * <p> * The internal VLAN is either vlan-untagged for an access port, * or vlan-native for a trunk port. * * @param connectPoint connect point * @return internal VLAN or null if both vlan-untagged and vlan-native are undefined */ public VlanId getInternalVlanId(ConnectPoint connectPoint) { VlanId untaggedVlanId = getUntaggedVlanId(connectPoint); VlanId nativeVlanId = getNativeVlanId(connectPoint); return untaggedVlanId != null ? untaggedVlanId : nativeVlanId; } /** * Returns optional pair device ID of given device. * * @param deviceId device ID * @return optional pair device ID. Might be empty if pair device is not configured */ Optional<DeviceId> getPairDeviceId(DeviceId deviceId) { SegmentRoutingDeviceConfig deviceConfig = cfgService.getConfig(deviceId, SegmentRoutingDeviceConfig.class); return Optional.ofNullable(deviceConfig).map(SegmentRoutingDeviceConfig::pairDeviceId); } /** * Returns optional pair device local port of given device. * * @param deviceId device ID * @return optional pair device ID. Might be empty if pair device is not configured */ Optional<PortNumber> getPairLocalPorts(DeviceId deviceId) { SegmentRoutingDeviceConfig deviceConfig = cfgService.getConfig(deviceId, SegmentRoutingDeviceConfig.class); return Optional.ofNullable(deviceConfig).map(SegmentRoutingDeviceConfig::pairLocalPort); } /** * Determine if current instance is the master of given connect point. * * @param cp connect point * @return true if current instance is the master of given connect point */ public boolean isMasterOf(ConnectPoint cp) { boolean isMaster = mastershipService.isLocalMaster(cp.deviceId()); if (!isMaster) { log.debug(NOT_MASTER, cp); } return isMaster; } /** * Returns locations of given resolved route. * * @param resolvedRoute resolved route * @return locations of nexthop. Might be empty if next hop is not found */ Set<ConnectPoint> nextHopLocations(ResolvedRoute resolvedRoute) { HostId hostId = HostId.hostId(resolvedRoute.nextHopMac(), resolvedRoute.nextHopVlan()); return Optional.ofNullable(hostService.getHost(hostId)) .map(Host::locations).orElse(Sets.newHashSet()) .stream().map(l -> (ConnectPoint) l).collect(Collectors.toSet()); } /** * Returns vlan port map of given device. * * @param deviceId device id * @return vlan-port multimap */ public Multimap<VlanId, PortNumber> getVlanPortMap(DeviceId deviceId) { HashMultimap<VlanId, PortNumber> vlanPortMap = HashMultimap.create(); interfaceService.getInterfaces().stream() .filter(intf -> intf.connectPoint().deviceId().equals(deviceId)) .forEach(intf -> { vlanPortMap.put(intf.vlanUntagged(), intf.connectPoint().port()); intf.vlanTagged().forEach(vlanTagged -> vlanPortMap.put(vlanTagged, intf.connectPoint().port()) ); vlanPortMap.put(intf.vlanNative(), intf.connectPoint().port()); }); vlanPortMap.removeAll(VlanId.NONE); return vlanPortMap; } /** * Returns the next objective ID for the given vlan id. It is expected * that the next-objective has been pre-created from configuration. 
* * @param deviceId Device ID * @param vlanId VLAN ID * @return next objective ID or -1 if it was not found */ int getVlanNextObjectiveId(DeviceId deviceId, VlanId vlanId) { if (groupHandlerMap.get(deviceId) != null) { log.trace("getVlanNextObjectiveId query in device {}", deviceId); return groupHandlerMap.get(deviceId).getVlanNextObjectiveId(vlanId); } else { log.warn("getVlanNextObjectiveId query - groupHandler for " + "device {} not found", deviceId); return -1; } } /** * Returns the next objective ID for the given portNumber, given the treatment. * There could be multiple different treatments to the same outport, which * would result in different objectives. If the next object does not exist, * and should be created, a new one is created and its id is returned. * * @param deviceId Device ID * @param portNum port number on device for which NextObjective is queried * @param treatment the actions to apply on the packets (should include outport) * @param meta metadata passed into the creation of a Next Objective if necessary * @param createIfMissing true if a next object should be created if not found * @return next objective ID or -1 if an error occurred during retrieval or creation */ public int getPortNextObjectiveId(DeviceId deviceId, PortNumber portNum, TrafficTreatment treatment, TrafficSelector meta, boolean createIfMissing) { DefaultGroupHandler ghdlr = groupHandlerMap.get(deviceId); if (ghdlr != null) { return ghdlr.getPortNextObjectiveId(portNum, treatment, meta, createIfMissing); } else { log.warn("getPortNextObjectiveId query - groupHandler for device {}" + " not found", deviceId); return -1; } } /** * Returns the group handler object for the specified device id. * * @param devId the device identifier * @return the groupHandler object for the device id, or null if not found */ DefaultGroupHandler getGroupHandler(DeviceId devId) { return groupHandlerMap.get(devId); } /** * Returns the default routing handler object. * * @return the default routing handler object */ public DefaultRoutingHandler getRoutingHandler() { return defaultRoutingHandler; } private class InternalPacketProcessor implements PacketProcessor { @Override public void process(PacketContext context) { if (context.isHandled()) { return; } InboundPacket pkt = context.inPacket(); Ethernet ethernet = pkt.parsed(); if (ethernet == null) { return; } log.trace("Rcvd pktin from {}: {}", context.inPacket().receivedFrom(), ethernet); if (ethernet.getEtherType() == TYPE_ARP) { log.warn("Received unexpected ARP packet on {}", context.inPacket().receivedFrom()); log.trace("{}", ethernet); return; } else if (ethernet.getEtherType() == Ethernet.TYPE_IPV4) { IPv4 ipv4Packet = (IPv4) ethernet.getPayload(); //ipHandler.addToPacketBuffer(ipv4Packet); if (ipv4Packet.getProtocol() == IPv4.PROTOCOL_ICMP) { icmpHandler.processIcmp(ethernet, pkt.receivedFrom()); } else { // NOTE: We don't support IP learning at this moment so this // is not necessary. Also it causes duplication of DHCP packets. 
// ipHandler.processPacketIn(ipv4Packet, pkt.receivedFrom()); } } else if (ethernet.getEtherType() == Ethernet.TYPE_IPV6) { IPv6 ipv6Packet = (IPv6) ethernet.getPayload(); //ipHandler.addToPacketBuffer(ipv6Packet); // We deal with the packet only if the packet is a ICMP6 ECHO/REPLY if (ipv6Packet.getNextHeader() == IPv6.PROTOCOL_ICMP6) { ICMP6 icmp6Packet = (ICMP6) ipv6Packet.getPayload(); if (icmp6Packet.getIcmpType() == ICMP6.ECHO_REQUEST || icmp6Packet.getIcmpType() == ICMP6.ECHO_REPLY) { icmpHandler.processIcmpv6(ethernet, pkt.receivedFrom()); } else { log.trace("Received ICMPv6 0x{} - not handled", Integer.toHexString(icmp6Packet.getIcmpType() & 0xff)); } } else { // NOTE: We don't support IP learning at this moment so this // is not necessary. Also it causes duplication of DHCPv6 packets. // ipHandler.processPacketIn(ipv6Packet, pkt.receivedFrom()); } } } } private class InternalEventHandler implements Runnable { private Event event; InternalEventHandler(Event event) { this.event = event; } @Override public void run() { try { // TODO We should also change SR routing and PW to listen to TopologyEvents if (event.type() == LinkEvent.Type.LINK_ADDED || event.type() == LinkEvent.Type.LINK_UPDATED) { linkHandler.processLinkAdded((Link) event.subject()); } else if (event.type() == LinkEvent.Type.LINK_REMOVED) { linkHandler.processLinkRemoved((Link) event.subject()); } else if (event.type() == DeviceEvent.Type.DEVICE_ADDED || event.type() == DeviceEvent.Type.DEVICE_AVAILABILITY_CHANGED || event.type() == DeviceEvent.Type.DEVICE_UPDATED) { DeviceId deviceId = ((Device) event.subject()).id(); if (deviceService.isAvailable(deviceId)) { log.info("** DEVICE UP Processing device event {} " + "for available device {}", event.type(), ((Device) event.subject()).id()); processDeviceAdded((Device) event.subject()); } else { log.info(" ** DEVICE DOWN Processing device event {}" + " for unavailable device {}", event.type(), ((Device) event.subject()).id()); processDeviceRemoved((Device) event.subject()); } } else if (event.type() == DeviceEvent.Type.PORT_ADDED) { // typically these calls come when device is added first time // so port filtering rules are handled at the device_added event. // port added calls represent all ports on the device, // enabled or not. 
log.trace("** PORT ADDED {}/{} -> {}", ((DeviceEvent) event).subject().id(), ((DeviceEvent) event).port().number(), event.type()); } else if (event.type() == DeviceEvent.Type.PORT_UPDATED) { // these calls happen for every subsequent event // ports enabled, disabled, switch goes away, comes back log.info("** PORT UPDATED {}/{} -> {}", event.subject(), ((DeviceEvent) event).port(), event.type()); processPortUpdated(((Device) event.subject()), ((DeviceEvent) event).port()); } else if (event.type() == TopologyEvent.Type.TOPOLOGY_CHANGED) { // Process topology event, needed for all modules relying on // topology service for path computation TopologyEvent topologyEvent = (TopologyEvent) event; log.info("Processing topology event {}, topology age {}, reasons {}", event.type(), topologyEvent.subject().time(), topologyEvent.reasons().size()); topologyHandler.processTopologyChange(topologyEvent.reasons()); } else if (event.type() == HostEvent.Type.HOST_ADDED) { hostHandler.processHostAddedEvent((HostEvent) event); } else if (event.type() == HostEvent.Type.HOST_MOVED) { hostHandler.processHostMovedEvent((HostEvent) event); routeHandler.processHostMovedEvent((HostEvent) event); } else if (event.type() == HostEvent.Type.HOST_REMOVED) { hostHandler.processHostRemovedEvent((HostEvent) event); } else if (event.type() == HostEvent.Type.HOST_UPDATED) { hostHandler.processHostUpdatedEvent((HostEvent) event); } else if (event.type() == RouteEvent.Type.ROUTE_ADDED) { routeHandler.processRouteAdded((RouteEvent) event); } else if (event.type() == RouteEvent.Type.ROUTE_UPDATED) { routeHandler.processRouteUpdated((RouteEvent) event); } else if (event.type() == RouteEvent.Type.ROUTE_REMOVED) { routeHandler.processRouteRemoved((RouteEvent) event); } else if (event.type() == RouteEvent.Type.ALTERNATIVE_ROUTES_CHANGED) { routeHandler.processAlternativeRoutesChanged((RouteEvent) event); } else if (event.type() == McastEvent.Type.SOURCES_ADDED || event.type() == McastEvent.Type.SOURCES_REMOVED || event.type() == McastEvent.Type.SINKS_ADDED || event.type() == McastEvent.Type.SINKS_REMOVED || event.type() == McastEvent.Type.ROUTE_ADDED || event.type() == McastEvent.Type.ROUTE_REMOVED) { mcastHandler.processMcastEvent((McastEvent) event); } else if (event.type() == NetworkConfigEvent.Type.CONFIG_ADDED) { NetworkConfigEvent netcfgEvent = (NetworkConfigEvent) event; Class configClass = netcfgEvent.configClass(); if (configClass.equals(SegmentRoutingAppConfig.class)) { appCfgHandler.processAppConfigAdded(netcfgEvent); log.info("App config event .. configuring network"); cfgListener.configureNetwork(); } else if (configClass.equals(SegmentRoutingDeviceConfig.class)) { log.info("Segment Routing Device Config added for {}", event.subject()); cfgListener.configureNetwork(); } else if (configClass.equals(XConnectConfig.class)) { xConnectHandler.processXConnectConfigAdded(netcfgEvent); } else if (configClass.equals(InterfaceConfig.class)) { log.info("Interface Config added for {}", event.subject()); cfgListener.configureNetwork(); } else { log.error("Unhandled config class: {}", configClass); } } else if (event.type() == NetworkConfigEvent.Type.CONFIG_UPDATED) { NetworkConfigEvent netcfgEvent = (NetworkConfigEvent) event; Class configClass = netcfgEvent.configClass(); if (configClass.equals(SegmentRoutingAppConfig.class)) { appCfgHandler.processAppConfigUpdated(netcfgEvent); log.info("App config event .. 
configuring network"); cfgListener.configureNetwork(); } else if (configClass.equals(SegmentRoutingDeviceConfig.class)) { log.info("Segment Routing Device Config updated for {}", event.subject()); createOrUpdateDeviceConfiguration(); } else if (configClass.equals(XConnectConfig.class)) { xConnectHandler.processXConnectConfigUpdated(netcfgEvent); } else if (configClass.equals(InterfaceConfig.class)) { log.info("Interface Config updated for {}", event.subject()); createOrUpdateDeviceConfiguration(); updateInterface((InterfaceConfig) netcfgEvent.config().get(), (InterfaceConfig) netcfgEvent.prevConfig().get()); } else { log.error("Unhandled config class: {}", configClass); } } else if (event.type() == NetworkConfigEvent.Type.CONFIG_REMOVED) { NetworkConfigEvent netcfgEvent = (NetworkConfigEvent) event; Class configClass = netcfgEvent.configClass(); if (configClass.equals(SegmentRoutingAppConfig.class)) { appCfgHandler.processAppConfigRemoved(netcfgEvent); log.info("App config event .. configuring network"); cfgListener.configureNetwork(); } else if (configClass.equals(SegmentRoutingDeviceConfig.class)) { // TODO Handle sr device config removal log.info("SegmentRoutingDeviceConfig removal is not handled in current implementation"); } else if (configClass.equals(XConnectConfig.class)) { xConnectHandler.processXConnectConfigRemoved(netcfgEvent); } else if (configClass.equals(InterfaceConfig.class)) { // TODO Handle interface removal log.info("InterfaceConfig removal is not handled in current implementation"); } else { log.error("Unhandled config class: {}", configClass); } } else { log.warn("Unhandled event type: {}", event.type()); } } catch (Exception e) { log.error("SegmentRouting event handler thread thrown an exception: {}", e.getMessage(), e); } } } void processDeviceAdded(Device device) { log.info("** DEVICE ADDED with ID {}", device.id()); // NOTE: Punt ARP/NDP even when the device is not configured. // Host learning without network config is required for CORD config generator. routingRulePopulator.populateIpPunts(device.id()); routingRulePopulator.populateArpNdpPunts(device.id()); if (deviceConfiguration == null || !deviceConfiguration.isConfigured(device.id())) { log.warn("Device configuration unavailable. Device {} will be " + "processed after configuration.", device.id()); return; } processDeviceAddedInternal(device.id()); } private void processDeviceAddedInternal(DeviceId deviceId) { // Irrespective of whether the local is a MASTER or not for this device, // we need to create a SR-group-handler instance. This is because in a // multi-instance setup, any instance can initiate forwarding/next-objectives // for any switch (even if this instance is a SLAVE or not even connected // to the switch). To handle this, a default-group-handler instance is necessary // per switch. log.debug("Current groupHandlerMap devs: {}", groupHandlerMap.keySet()); if (groupHandlerMap.get(deviceId) == null) { DefaultGroupHandler groupHandler; try { groupHandler = DefaultGroupHandler. 
createGroupHandler(deviceId, appId, deviceConfiguration, linkService, flowObjectiveService, this); } catch (DeviceConfigNotFoundException e) { log.warn(e.getMessage() + " Aborting processDeviceAdded."); return; } log.debug("updating groupHandlerMap with new grpHdlr for device: {}", deviceId); groupHandlerMap.put(deviceId, groupHandler); } if (mastershipService.isLocalMaster(deviceId)) { defaultRoutingHandler.populatePortAddressingRules(deviceId); xConnectHandler.init(deviceId); DefaultGroupHandler groupHandler = groupHandlerMap.get(deviceId); groupHandler.createGroupsFromVlanConfig(); routingRulePopulator.populateSubnetBroadcastRule(deviceId); } appCfgHandler.init(deviceId); hostHandler.init(deviceId); routeHandler.init(deviceId); } private void processDeviceRemoved(Device device) { dsNextObjStore.entrySet().stream() .filter(entry -> entry.getKey().deviceId().equals(device.id())) .forEach(entry -> dsNextObjStore.remove(entry.getKey())); vlanNextObjStore.entrySet().stream() .filter(entry -> entry.getKey().deviceId().equals(device.id())) .forEach(entry -> vlanNextObjStore.remove(entry.getKey())); portNextObjStore.entrySet().stream() .filter(entry -> entry.getKey().deviceId().equals(device.id())) .forEach(entry -> portNextObjStore.remove(entry.getKey())); linkHandler.processDeviceRemoved(device); DefaultGroupHandler gh = groupHandlerMap.remove(device.id()); if (gh != null) { gh.shutdown(); } // Note that a switch going down is associated with all of its links // going down as well, but it is treated as a single switch down event // while the link-downs are ignored. We cannot rely on the ordering of // events - i.e we cannot expect all link-downs to come before the // switch down - so we purge all seen-links for the switch before // handling route-path changes for the switch-down defaultRoutingHandler .populateRoutingRulesForLinkStatusChange(null, null, device.id(), true); defaultRoutingHandler.purgeEcmpGraph(device.id()); xConnectHandler.removeDevice(device.id()); // Cleanup all internal groupHandler stores for this device. Should be // done after all rerouting or rehashing has been completed groupHandlerMap.entrySet() .forEach(entry -> entry.getValue().cleanUpForNeighborDown(device.id())); } private void processPortUpdated(Device device, Port port) { if (deviceConfiguration == null || !deviceConfiguration.isConfigured(device.id())) { log.warn("Device configuration uploading. Not handling port event for" + "dev: {} port: {}", device.id(), port.number()); return; } if (!mastershipService.isLocalMaster(device.id())) { log.debug("Not master for dev:{} .. not handling port updated event" + "for port {}", device.id(), port.number()); return; } // first we handle filtering rules associated with the port if (port.isEnabled()) { log.info("Switchport {}/{} enabled..programming filters", device.id(), port.number()); routingRulePopulator.processSinglePortFilters(device.id(), port.number(), true); } else { log.info("Switchport {}/{} disabled..removing filters", device.id(), port.number()); routingRulePopulator.processSinglePortFilters(device.id(), port.number(), false); } // portUpdated calls are for ports that have gone down or up. For switch // to switch ports, link-events should take care of any re-routing or // group editing necessary for port up/down. Here we only process edge ports // that are already configured. 
ConnectPoint cp = new ConnectPoint(device.id(), port.number()); VlanId untaggedVlan = getUntaggedVlanId(cp); VlanId nativeVlan = getNativeVlanId(cp); Set<VlanId> taggedVlans = getTaggedVlanId(cp); if (untaggedVlan == null && nativeVlan == null && taggedVlans.isEmpty()) { log.debug("Not handling port updated event for non-edge port (unconfigured) " + "dev/port: {}/{}", device.id(), port.number()); return; } if (untaggedVlan != null) { processEdgePort(device, port, untaggedVlan, true); } if (nativeVlan != null) { processEdgePort(device, port, nativeVlan, true); } if (!taggedVlans.isEmpty()) { taggedVlans.forEach(tag -> processEdgePort(device, port, tag, false)); } } private void processEdgePort(Device device, Port port, VlanId vlanId, boolean popVlan) { boolean portUp = port.isEnabled(); if (portUp) { log.info("Device:EdgePort {}:{} is enabled in vlan: {}", device.id(), port.number(), vlanId); hostHandler.processPortUp(new ConnectPoint(device.id(), port.number())); } else { log.info("Device:EdgePort {}:{} is disabled in vlan: {}", device.id(), port.number(), vlanId); } DefaultGroupHandler groupHandler = groupHandlerMap.get(device.id()); if (groupHandler != null) { groupHandler.processEdgePort(port.number(), vlanId, popVlan, portUp); } else { log.warn("Group handler not found for dev:{}. Not handling edge port" + " {} event for port:{}", device.id(), (portUp) ? "UP" : "DOWN", port.number()); } } private void createOrUpdateDeviceConfiguration() { if (deviceConfiguration == null) { log.info("Creating new DeviceConfiguration"); deviceConfiguration = new DeviceConfiguration(this); } else { log.info("Updating DeviceConfiguration"); deviceConfiguration.updateConfig(); } } /** * Registers the given connect point with the NRS, this is necessary * to receive the NDP and ARP packets from the NRS. * * @param portToRegister connect point to register */ public void registerConnectPoint(ConnectPoint portToRegister) { neighbourResolutionService.registerNeighbourHandler( portToRegister, neighbourHandler, appId ); } private class InternalConfigListener implements NetworkConfigListener { private static final long PROGRAM_DELAY = 2; SegmentRoutingManager srManager; /** * Constructs the internal network config listener. * * @param srManager segment routing manager */ InternalConfigListener(SegmentRoutingManager srManager) { this.srManager = srManager; } /** * Reads network config and initializes related data structure accordingly. 
*/ void configureNetwork() { log.info("Configuring network ..."); createOrUpdateDeviceConfiguration(); arpHandler = new ArpHandler(srManager); icmpHandler = new IcmpHandler(srManager); ipHandler = new IpHandler(srManager); routingRulePopulator = new RoutingRulePopulator(srManager); defaultRoutingHandler = new DefaultRoutingHandler(srManager); tunnelHandler = new TunnelHandler(linkService, deviceConfiguration, groupHandlerMap, tunnelStore); policyHandler = new PolicyHandler(appId, deviceConfiguration, flowObjectiveService, tunnelHandler, policyStore); // add a small delay to absorb multiple network config added notifications if (!programmingScheduled.get()) { log.info("Buffering config calls for {} secs", PROGRAM_DELAY); programmingScheduled.set(true); mainEventExecutor.schedule(new ConfigChange(), PROGRAM_DELAY, TimeUnit.SECONDS); } mcastHandler.init(); } @Override public void event(NetworkConfigEvent event) { checkState(appCfgHandler != null, "NetworkConfigEventHandler is not initialized"); checkState(xConnectHandler != null, "XConnectHandler is not initialized"); switch (event.type()) { case CONFIG_ADDED: case CONFIG_UPDATED: case CONFIG_REMOVED: log.trace("Schedule Network Config event {}", event); mainEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); break; default: break; } } @Override public boolean isRelevant(NetworkConfigEvent event) { if (event.type() == CONFIG_REGISTERED || event.type() == CONFIG_UNREGISTERED) { log.debug("Ignore event {} due to type mismatch", event); return false; } if (!event.configClass().equals(SegmentRoutingDeviceConfig.class) && !event.configClass().equals(SegmentRoutingAppConfig.class) && !event.configClass().equals(InterfaceConfig.class) && !event.configClass().equals(XConnectConfig.class)) { log.debug("Ignore event {} due to class mismatch", event); return false; } return true; } private final class ConfigChange implements Runnable { @Override public void run() { programmingScheduled.set(false); log.info("Reacting to config changes after buffer delay"); for (Device device : deviceService.getDevices()) { processDeviceAdded(device); } defaultRoutingHandler.startPopulationProcess(); } } } private class InternalLinkListener implements LinkListener { @Override public void event(LinkEvent event) { if (event.type() == LinkEvent.Type.LINK_ADDED || event.type() == LinkEvent.Type.LINK_UPDATED || event.type() == LinkEvent.Type.LINK_REMOVED) { log.trace("Schedule Link event {}", event); mainEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); } } } private class InternalDeviceListener implements DeviceListener { @Override public void event(DeviceEvent event) { switch (event.type()) { case DEVICE_ADDED: case PORT_UPDATED: case PORT_ADDED: case DEVICE_UPDATED: case DEVICE_AVAILABILITY_CHANGED: log.trace("Schedule Device event {}", event); mainEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); break; default: } } } private class InternalTopologyListener implements TopologyListener { @Override public void event(TopologyEvent event) { switch (event.type()) { case TOPOLOGY_CHANGED: log.trace("Schedule Topology event {}", event); mainEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); break; default: } } } private class InternalHostListener implements HostListener { @Override public void event(HostEvent event) { switch (event.type()) { case HOST_ADDED: case HOST_MOVED: case HOST_REMOVED: case HOST_UPDATED: log.trace("Schedule Host event 
{}", event); hostEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); break; default: log.warn("Unsupported host event type: {}", event.type()); break; } } } private class InternalMcastListener implements McastListener { @Override public void event(McastEvent event) { switch (event.type()) { case SOURCES_ADDED: case SOURCES_REMOVED: case SINKS_ADDED: case SINKS_REMOVED: case ROUTE_REMOVED: log.trace("Schedule Mcast event {}", event); mcastEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); break; case ROUTE_ADDED: default: log.warn("Unsupported mcast event type: {}", event.type()); break; } } } private class InternalRouteEventListener implements RouteListener { @Override public void event(RouteEvent event) { switch (event.type()) { case ROUTE_ADDED: case ROUTE_UPDATED: case ROUTE_REMOVED: case ALTERNATIVE_ROUTES_CHANGED: log.trace("Schedule Route event {}", event); routeEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); break; default: log.warn("Unsupported route event type: {}", event.type()); break; } } } private void updateInterface(InterfaceConfig conf, InterfaceConfig prevConf) { try { Set<Interface> intfs = conf.getInterfaces(); Set<Interface> prevIntfs = prevConf.getInterfaces(); // Now we only handle one interface config at each port. if (intfs.size() != 1 || prevIntfs.size() != 1) { log.warn("Interface update aborted - one at a time is allowed, " + "but {} / {}(prev) received.", intfs.size(), prevIntfs.size()); return; } Interface intf = intfs.stream().findFirst().get(); Interface prevIntf = prevIntfs.stream().findFirst().get(); DeviceId deviceId = intf.connectPoint().deviceId(); PortNumber portNum = intf.connectPoint().port(); removeSubnetConfig(prevIntf.connectPoint(), Sets.difference(new HashSet<>(prevIntf.ipAddressesList()), new HashSet<>(intf.ipAddressesList()))); if (!prevIntf.vlanNative().equals(VlanId.NONE) && !prevIntf.vlanNative().equals(intf.vlanUntagged()) && !prevIntf.vlanNative().equals(intf.vlanNative())) { if (intf.vlanTagged().contains(prevIntf.vlanNative())) { // Update filtering objective and L2IG group bucket updatePortVlanTreatment(deviceId, portNum, prevIntf.vlanNative(), false); } else { // RemoveVlanNative updateVlanConfigInternal(deviceId, portNum, prevIntf.vlanNative(), true, false); } } if (!prevIntf.vlanUntagged().equals(VlanId.NONE) && !prevIntf.vlanUntagged().equals(intf.vlanUntagged()) && !prevIntf.vlanUntagged().equals(intf.vlanNative())) { if (intf.vlanTagged().contains(prevIntf.vlanUntagged())) { // Update filtering objective and L2IG group bucket updatePortVlanTreatment(deviceId, portNum, prevIntf.vlanUntagged(), false); } else { // RemoveVlanUntagged updateVlanConfigInternal(deviceId, portNum, prevIntf.vlanUntagged(), true, false); } } if (!prevIntf.vlanTagged().isEmpty() && !intf.vlanTagged().equals(prevIntf.vlanTagged())) { // RemoveVlanTagged Sets.difference(prevIntf.vlanTagged(), intf.vlanTagged()).stream() .filter(i -> !intf.vlanUntagged().equals(i)) .filter(i -> !intf.vlanNative().equals(i)) .forEach(vlanId -> updateVlanConfigInternal( deviceId, portNum, vlanId, false, false)); } if (!intf.vlanNative().equals(VlanId.NONE) && !prevIntf.vlanNative().equals(intf.vlanNative()) && !prevIntf.vlanUntagged().equals(intf.vlanNative())) { if (prevIntf.vlanTagged().contains(intf.vlanNative())) { // Update filtering objective and L2IG group bucket updatePortVlanTreatment(deviceId, portNum, intf.vlanNative(), true); } else { // AddVlanNative 
updateVlanConfigInternal(deviceId, portNum, intf.vlanNative(), true, true); } } if (!intf.vlanTagged().isEmpty() && !intf.vlanTagged().equals(prevIntf.vlanTagged())) { // AddVlanTagged Sets.difference(intf.vlanTagged(), prevIntf.vlanTagged()).stream() .filter(i -> !prevIntf.vlanUntagged().equals(i)) .filter(i -> !prevIntf.vlanNative().equals(i)) .forEach(vlanId -> updateVlanConfigInternal( deviceId, portNum, vlanId, false, true) ); } if (!intf.vlanUntagged().equals(VlanId.NONE) && !prevIntf.vlanUntagged().equals(intf.vlanUntagged()) && !prevIntf.vlanNative().equals(intf.vlanUntagged())) { if (prevIntf.vlanTagged().contains(intf.vlanUntagged())) { // Update filtering objective and L2IG group bucket updatePortVlanTreatment(deviceId, portNum, intf.vlanUntagged(), true); } else { // AddVlanUntagged updateVlanConfigInternal(deviceId, portNum, intf.vlanUntagged(), true, true); } } addSubnetConfig(prevIntf.connectPoint(), Sets.difference(new HashSet<>(intf.ipAddressesList()), new HashSet<>(prevIntf.ipAddressesList()))); } catch (ConfigException e) { log.error("Error in configuration"); } } private void updatePortVlanTreatment(DeviceId deviceId, PortNumber portNum, VlanId vlanId, boolean pushVlan) { DefaultGroupHandler grpHandler = getGroupHandler(deviceId); if (grpHandler == null) { log.warn("Failed to retrieve group handler for device {}", deviceId); return; } // Update filtering objective for a single port routingRulePopulator.updateSinglePortFilters(deviceId, portNum, !pushVlan, vlanId, false); routingRulePopulator.updateSinglePortFilters(deviceId, portNum, pushVlan, vlanId, true); if (getVlanNextObjectiveId(deviceId, vlanId) != -1) { // Update L2IG bucket of the port grpHandler.updateL2InterfaceGroupBucket(portNum, vlanId, pushVlan); } else { log.warn("Failed to retrieve next objective for vlan {} in device {}:{}", vlanId, deviceId, portNum); } } private void updateVlanConfigInternal(DeviceId deviceId, PortNumber portNum, VlanId vlanId, boolean pushVlan, boolean install) { DefaultGroupHandler grpHandler = getGroupHandler(deviceId); if (grpHandler == null) { log.warn("Failed to retrieve group handler for device {}", deviceId); return; } // Update filtering objective for a single port routingRulePopulator.updateSinglePortFilters(deviceId, portNum, pushVlan, vlanId, install); // Update filtering objective for multicast ingress port mcastHandler.updateFilterToDevice(deviceId, portNum, vlanId, install); int nextId = getVlanNextObjectiveId(deviceId, vlanId); if (nextId != -1 && !install) { // Update next objective for a single port as an output port // Remove a single port from L2FG grpHandler.updateGroupFromVlanConfiguration(vlanId, portNum, nextId, install); // Remove L2 Bridging rule and L3 Unicast rule to the host hostHandler.processIntfVlanUpdatedEvent(deviceId, portNum, vlanId, pushVlan, install); // Remove broadcast forwarding rule and corresponding L2FG for VLAN // only if there is no port configured on that VLAN ID if (!getVlanPortMap(deviceId).containsKey(vlanId)) { // Remove broadcast forwarding rule for the VLAN routingRulePopulator.updateSubnetBroadcastRule(deviceId, vlanId, install); // Remove L2FG for VLAN grpHandler.removeBcastGroupFromVlan(deviceId, portNum, vlanId, pushVlan); } else { // Remove L2IG of the port grpHandler.removePortNextObjective(deviceId, portNum, vlanId, pushVlan); } } else if (install) { if (nextId != -1) { // Add a single port to L2FG grpHandler.updateGroupFromVlanConfiguration(vlanId, portNum, nextId, install); } else { // Create L2FG for VLAN 
grpHandler.createBcastGroupFromVlan(vlanId, Collections.singleton(portNum)); routingRulePopulator.updateSubnetBroadcastRule(deviceId, vlanId, install); } hostHandler.processIntfVlanUpdatedEvent(deviceId, portNum, vlanId, pushVlan, install); } else { log.warn("Failed to retrieve next objective for vlan {} in device {}:{}", vlanId, deviceId, portNum); } } private void removeSubnetConfig(ConnectPoint cp, Set<InterfaceIpAddress> ipAddressSet) { Set<IpPrefix> ipPrefixSet = ipAddressSet.stream(). map(InterfaceIpAddress::subnetAddress).collect(Collectors.toSet()); Set<InterfaceIpAddress> deviceIntfIpAddrs = interfaceService.getInterfaces().stream() .filter(intf -> intf.connectPoint().deviceId().equals(cp.deviceId())) .filter(intf -> !intf.connectPoint().equals(cp)) .flatMap(intf -> intf.ipAddressesList().stream()) .collect(Collectors.toSet()); // 1. Partial subnet population // Remove routing rules for removed subnet from previous configuration, // which does not also exist in other interfaces in the same device Set<IpPrefix> deviceIpPrefixSet = deviceIntfIpAddrs.stream() .map(InterfaceIpAddress::subnetAddress) .collect(Collectors.toSet()); defaultRoutingHandler.revokeSubnet( ipPrefixSet.stream() .filter(ipPrefix -> !deviceIpPrefixSet.contains(ipPrefix)) .collect(Collectors.toSet())); // 2. Interface IP punts // Remove IP punts for old Intf address Set<IpAddress> deviceIpAddrs = deviceIntfIpAddrs.stream() .map(InterfaceIpAddress::ipAddress) .collect(Collectors.toSet()); ipAddressSet.stream() .map(InterfaceIpAddress::ipAddress) .filter(interfaceIpAddress -> !deviceIpAddrs.contains(interfaceIpAddress)) .forEach(interfaceIpAddress -> routingRulePopulator.revokeSingleIpPunts( cp.deviceId(), interfaceIpAddress)); // 3. Host unicast routing rule // Remove unicast routing rule hostHandler.processIntfIpUpdatedEvent(cp, ipPrefixSet, false); } private void addSubnetConfig(ConnectPoint cp, Set<InterfaceIpAddress> ipAddressSet) { Set<IpPrefix> ipPrefixSet = ipAddressSet.stream(). map(InterfaceIpAddress::subnetAddress).collect(Collectors.toSet()); Set<InterfaceIpAddress> deviceIntfIpAddrs = interfaceService.getInterfaces().stream() .filter(intf -> intf.connectPoint().deviceId().equals(cp.deviceId())) .filter(intf -> !intf.connectPoint().equals(cp)) .flatMap(intf -> intf.ipAddressesList().stream()) .collect(Collectors.toSet()); // 1. Partial subnet population // Add routing rules for newly added subnet, which does not also exist in // other interfaces in the same device Set<IpPrefix> deviceIpPrefixSet = deviceIntfIpAddrs.stream() .map(InterfaceIpAddress::subnetAddress) .collect(Collectors.toSet()); defaultRoutingHandler.populateSubnet( Collections.singleton(cp), ipPrefixSet.stream() .filter(ipPrefix -> !deviceIpPrefixSet.contains(ipPrefix)) .collect(Collectors.toSet())); // 2. Interface IP punts // Add IP punts for new Intf address Set<IpAddress> deviceIpAddrs = deviceIntfIpAddrs.stream() .map(InterfaceIpAddress::ipAddress) .collect(Collectors.toSet()); ipAddressSet.stream() .map(InterfaceIpAddress::ipAddress) .filter(interfaceIpAddress -> !deviceIpAddrs.contains(interfaceIpAddress)) .forEach(interfaceIpAddress -> routingRulePopulator.populateSingleIpPunts( cp.deviceId(), interfaceIpAddress)); // 3. Host unicast routing rule // Add unicast routing rule hostHandler.processIntfIpUpdatedEvent(cp, ipPrefixSet, true); } }
Remove event handling delay

Change-Id: I890ac8643c6b331f6d1f660ef634801c305be55a
apps/segmentrouting/app/src/main/java/org/onosproject/segmentrouting/SegmentRoutingManager.java
Remove event handling delay
<ide><path>pps/segmentrouting/app/src/main/java/org/onosproject/segmentrouting/SegmentRoutingManager.java <ide> case CONFIG_UPDATED: <ide> case CONFIG_REMOVED: <ide> log.trace("Schedule Network Config event {}", event); <del> mainEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); <add> mainEventExecutor.execute(new InternalEventHandler(event)); <ide> break; <ide> default: <ide> break; <ide> event.type() == LinkEvent.Type.LINK_UPDATED || <ide> event.type() == LinkEvent.Type.LINK_REMOVED) { <ide> log.trace("Schedule Link event {}", event); <del> mainEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); <add> mainEventExecutor.execute(new InternalEventHandler(event)); <ide> } <ide> } <ide> } <ide> case DEVICE_UPDATED: <ide> case DEVICE_AVAILABILITY_CHANGED: <ide> log.trace("Schedule Device event {}", event); <del> mainEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); <add> mainEventExecutor.execute(new InternalEventHandler(event)); <ide> break; <ide> default: <ide> } <ide> switch (event.type()) { <ide> case TOPOLOGY_CHANGED: <ide> log.trace("Schedule Topology event {}", event); <del> mainEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); <add> mainEventExecutor.execute(new InternalEventHandler(event)); <ide> break; <ide> default: <ide> } <ide> case HOST_REMOVED: <ide> case HOST_UPDATED: <ide> log.trace("Schedule Host event {}", event); <del> hostEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); <add> hostEventExecutor.execute(new InternalEventHandler(event)); <ide> break; <ide> default: <ide> log.warn("Unsupported host event type: {}", event.type()); <ide> case SINKS_REMOVED: <ide> case ROUTE_REMOVED: <ide> log.trace("Schedule Mcast event {}", event); <del> mcastEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); <add> mcastEventExecutor.execute(new InternalEventHandler(event)); <ide> break; <ide> case ROUTE_ADDED: <ide> default: <ide> case ROUTE_REMOVED: <ide> case ALTERNATIVE_ROUTES_CHANGED: <ide> log.trace("Schedule Route event {}", event); <del> routeEventExecutor.schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS); <add> routeEventExecutor.execute(new InternalEventHandler(event)); <ide> break; <ide> default: <ide> log.warn("Unsupported route event type: {}", event.type());
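The diff above swaps every 100 ms `schedule(new InternalEventHandler(event), 100, TimeUnit.MILLISECONDS)` call on the per-category executors (main, host, route, mcast) for a plain `execute(...)`, so events are queued for immediate handling instead of being artificially delayed. The following is a minimal, self-contained sketch of that pattern for illustration only; the class and method names are invented here and are not part of the ONOS code in this row.

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

// Illustrative sketch (hypothetical names): a single-threaded scheduled executor
// that used to delay each queued event handler by 100 ms and now runs it as soon
// as the thread is free. Ordering is preserved either way because the pool has
// exactly one worker thread.
public class EventDispatchSketch {

    private final ScheduledExecutorService eventExecutor =
            Executors.newScheduledThreadPool(1);

    // Old behaviour: every event handler waits an extra 100 ms before running.
    void dispatchWithDelay(Runnable handler) {
        eventExecutor.schedule(handler, 100, TimeUnit.MILLISECONDS);
    }

    // New behaviour after the commit: the handler is submitted for immediate execution.
    void dispatchImmediately(Runnable handler) {
        eventExecutor.execute(handler);
    }

    public static void main(String[] args) throws InterruptedException {
        EventDispatchSketch sketch = new EventDispatchSketch();
        long start = System.nanoTime();
        sketch.dispatchImmediately(() ->
                System.out.printf("handled after %d ms%n",
                        (System.nanoTime() - start) / 1_000_000));
        sketch.eventExecutor.shutdown();
        sketch.eventExecutor.awaitTermination(1, TimeUnit.SECONDS);
    }
}

Under this reading, the change removes only the fixed buffering delay; the single-threaded executors still serialize events of the same category, so relative ordering of queued handlers is unchanged.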
Java
mit
4031b1fe3e4362780d07dee87ef3f0b191071287
0
vikas17a/Algorithm-Implementations,joshimoo/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,vikas17a/Algorithm-Implementations,girishramnani/Algorithm-Implementations,jb1717/Algorithm-Implementations,jb1717/Algorithm-Implementations,kidaa/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,isalnikov/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,imanmafi/Algorithm-Implementations,Etiene/Algorithm-Implementations,Endika/Algorithm-Implementations,jiang42/Algorithm-Implementations,pravsingh/Algorithm-Implementations,imanmafi/Algorithm-Implementations,joshimoo/Algorithm-Implementations,pravsingh/Algorithm-Implementations,mishin/Algorithm-Implementations,kennyledet/Algorithm-Implementations,pravsingh/Algorithm-Implementations,warreee/Algorithm-Implementations,warreee/Algorithm-Implementations,joshimoo/Algorithm-Implementations,aayushKumarJarvis/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,rohanp/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,girishramnani/Algorithm-Implementations,Yonaba/Algorithm-Implementations,vikas17a/Algorithm-Implementations,Endika/Algorithm-Implementations,warreee/Algorithm-Implementations,rohanp/Algorithm-Implementations,aayushKumarJarvis/Algorithm-Implementations,aayushKumarJarvis/Algorithm-Implementations,praveenjha527/Algorithm-Implementations,jiang42/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,movb/Algorithm-Implementations,aayushKumarJarvis/Algorithm-Implementations,girishramnani/Algorithm-Implementations,movb/Algorithm-Implementations,movb/Algorithm-Implementations,mishin/Algorithm-Implementations,mishin/Algorithm-Implementations,warreee/Algorithm-Implementations,joshimoo/Algorithm-Implementations,girishramnani/Algorithm-Implementations,Endika/Algorithm-Implementations,imanmafi/Algorithm-Implementations,jb1717/Algorithm-Implementations,girishramnani/Algorithm-Implementations,Yonaba/Algorithm-Implementations,rohanp/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,jb1717/Algorithm-Implementations,rohanp/Algorithm-Implementations,Etiene/Algorithm-Implementations,mishin/Algorithm-Implementations,jb1717/Algorithm-Implementations,mishin/Algorithm-Implementations,aayushKumarJarvis/Algorithm-Implementations,vikas17a/Algorithm-Implementations,mishin/Algorithm-Implementations,Endika/Algorithm-Implementations,praveenjha527/Algorithm-Implementations,rohanp/Algorithm-Implementations,vikas17a/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,imanmafi/Algorithm-Implementations,kidaa/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,Endika/Algorithm-Implementations,Yonaba/Algorithm-Implementations,kennyledet/Algorithm-Implementations,Yonaba/Algorithm-Implementations,mishin/Algorithm-Implementations,kennyledet/Algorithm-Implementations,isalnikov/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,aayushKumarJarvis/Algorithm-Implementations,Endika/Algorithm-Implementations,warreee/Algorithm-Implementations,Endika/Algorithm-Implementations,imanmafi/Algorithm-Implementations,kidaa/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,rohanp/Algorithm-Implementations,jiang42/Algorithm-Implementations,Etiene/Algorithm-Implementations,Yonaba/Algorithm-Implementations,Yonaba/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,kennyledet/Algorithm-Implementations,rohanp/Algorithm-Implementations,rohanp/Algorithm-Implementations,j
iang42/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,movb/Algorithm-Implementations,rohanp/Algorithm-Implementations,praveenjha527/Algorithm-Implementations,Yonaba/Algorithm-Implementations,kidaa/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,warreee/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,kidaa/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,imanmafi/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,kennyledet/Algorithm-Implementations,imanmafi/Algorithm-Implementations,movb/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,vikas17a/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,isalnikov/Algorithm-Implementations,kidaa/Algorithm-Implementations,warreee/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,kennyledet/Algorithm-Implementations,joshimoo/Algorithm-Implementations,praveenjha527/Algorithm-Implementations,kennyledet/Algorithm-Implementations,warreee/Algorithm-Implementations,kidaa/Algorithm-Implementations,pravsingh/Algorithm-Implementations,Yonaba/Algorithm-Implementations,Etiene/Algorithm-Implementations,isalnikov/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,imanmafi/Algorithm-Implementations,vikas17a/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,jiang42/Algorithm-Implementations,vikas17a/Algorithm-Implementations,imanmafi/Algorithm-Implementations,kennyledet/Algorithm-Implementations,kennyledet/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,movb/Algorithm-Implementations,pravsingh/Algorithm-Implementations,Endika/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,girishramnani/Algorithm-Implementations,warreee/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,jiang42/Algorithm-Implementations,isalnikov/Algorithm-Implementations,movb/Algorithm-Implementations,aayushKumarJarvis/Algorithm-Implementations,vikas17a/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,rohanp/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,Endika/Algorithm-Implementations,aayushKumarJarvis/Algorithm-Implementations,Yonaba/Algorithm-Implementations,kennyledet/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,mishin/Algorithm-Implementations,praveenjha527/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,Etiene/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,joshimoo/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,isalnikov/Algorithm-Implementations,kidaa/Algorithm-Implementations,jb1717/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,praveenjha527/Algorithm-Implementations,rohanp/Algorithm-Implementations,kidaa/Algorithm-Implementations,imanmafi/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,jb1717/Algorithm-Implementations,jiang42/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,isalnikov/Algorithm-Implementations,jb1717/Algorithm-Implementations,pravsingh/Algorithm-Implementations,kennyledet/Algorithm-Implementations,vikas17a/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,praveenjha527/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,jiang42/Algorithm-Implementations,pravsingh/Algorithm-Implementat
ions,praveenjha527/Algorithm-Implementations,Yonaba/Algorithm-Implementations,movb/Algorithm-Implementations,imanmafi/Algorithm-Implementations,isalnikov/Algorithm-Implementations,mishin/Algorithm-Implementations,isalnikov/Algorithm-Implementations,rohanp/Algorithm-Implementations,isalnikov/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,Etiene/Algorithm-Implementations,Etiene/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,pravsingh/Algorithm-Implementations,imanmafi/Algorithm-Implementations,jiang42/Algorithm-Implementations,vikas17a/Algorithm-Implementations,rohanp/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,Endika/Algorithm-Implementations,Yonaba/Algorithm-Implementations,Yonaba/Algorithm-Implementations,Endika/Algorithm-Implementations,pravsingh/Algorithm-Implementations,Etiene/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,joshimoo/Algorithm-Implementations,jb1717/Algorithm-Implementations,jiang42/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,girishramnani/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,imanmafi/Algorithm-Implementations,movb/Algorithm-Implementations,isalnikov/Algorithm-Implementations,jb1717/Algorithm-Implementations,warreee/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,joshimoo/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,girishramnani/Algorithm-Implementations,warreee/Algorithm-Implementations,pravsingh/Algorithm-Implementations,kidaa/Algorithm-Implementations,Yonaba/Algorithm-Implementations,kennyledet/Algorithm-Implementations,Endika/Algorithm-Implementations,movb/Algorithm-Implementations,n1ghtmare/Algorithm-Implementations,warreee/Algorithm-Implementations,Endika/Algorithm-Implementations,pravsingh/Algorithm-Implementations,movb/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,warreee/Algorithm-Implementations,kidaa/Algorithm-Implementations,warreee/Algorithm-Implementations,girishramnani/Algorithm-Implementations,jb1717/Algorithm-Implementations,joshimoo/Algorithm-Implementations,joshimoo/Algorithm-Implementations,isalnikov/Algorithm-Implementations,Endika/Algorithm-Implementations,Etiene/Algorithm-Implementations,mishin/Algorithm-Implementations,pravsingh/Algorithm-Implementations,aayushKumarJarvis/Algorithm-Implementations,jb1717/Algorithm-Implementations,movb/Algorithm-Implementations,warreee/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,girishramnani/Algorithm-Implementations,rohanp/Algorithm-Implementations,warreee/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,girishramnani/Algorithm-Implementations,vikas17a/Algorithm-Implementations,jiang42/Algorithm-Implementations,aayushKumarJarvis/Algorithm-Implementations,joshimoo/Algorithm-Implementations,Endika/Algorithm-Implementations,pravsingh/Algorithm-Implementations,Yonaba/Algorithm-Implementations,Etiene/Algorithm-Implementations,Etiene/Algorithm-Implementations,kennyledet/Algorithm-Implementations,Etiene/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,aayushKumarJarvis/Algorithm-Implementations,rohanp/Algorithm-Implementations,kidaa/Algorithm-Implementations,Yonaba/Algorithm-Implementations,vikas17a/Algorithm-Implementations,movb/Algorithm-Implementations,joshimoo/Algorithm-Implementations,kennyledet/Algorithm-Implementations,praveenjha527/Algorithm-Implementations,imanmafi/Algorithm-Implementations,jb1717/Algorit
hm-Implementations,isalnikov/Algorithm-Implementations,isalnikov/Algorithm-Implementations,jiang42/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,movb/Algorithm-Implementations,joshimoo/Algorithm-Implementations,pravsingh/Algorithm-Implementations,kennyledet/Algorithm-Implementations,praveenjha527/Algorithm-Implementations,kidaa/Algorithm-Implementations,mishin/Algorithm-Implementations,kidaa/Algorithm-Implementations,mishin/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,jiang42/Algorithm-Implementations,vikas17a/Algorithm-Implementations,imanmafi/Algorithm-Implementations,mishin/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,joshimoo/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,Etiene/Algorithm-Implementations,mishin/Algorithm-Implementations,vikas17a/Algorithm-Implementations,pravsingh/Algorithm-Implementations,kidaa/Algorithm-Implementations,Sweet-kid/Algorithm-Implementations,praveenjha527/Algorithm-Implementations,mishin/Algorithm-Implementations,praveenjha527/Algorithm-Implementations,movb/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,jb1717/Algorithm-Implementations,isalnikov/Algorithm-Implementations,jiang42/Algorithm-Implementations,joshimoo/Algorithm-Implementations,varunparkhe/Algorithm-Implementations,Etiene/Algorithm-Implementations,praveenjha527/Algorithm-Implementations,sugiartocokrowibowo/Algorithm-Implementations,girishramnani/Algorithm-Implementations,imrandomizer/Algorithm-Implementations,aayushKumarJarvis/Algorithm-Implementations,jiang42/Algorithm-Implementations,jb1717/Algorithm-Implementations,varunparkhe/Algorithm-Implementations
Caesar-Cipher/Java/irisyuan/caesarCipher.java
import java.util.Scanner; public class caesarCipher { // cipher(String,int) returns original message shifted by 3 letters // if input is numerical, adds 3 to number public static char[] cipher(String message, int shift) { // declare variables String s = message; // convert string to array of chars char[] letters = message.toCharArray(); for(int i = 0; i < message.length(); i++){ letters[i] += 3; } return letters; } // main method takes user input and calls cipher public static void main(String[] args) { Scanner input = new Scanner(System.in); System.out.println("Plaintext: "); String message = input.nextLine(); System.out.println("Ciphertext:"); System.out.println(cipher(message, 3)); } }
Delete caesarCipher.java
Caesar-Cipher/Java/irisyuan/caesarCipher.java
Delete caesarCipher.java
<ide><path>aesar-Cipher/Java/irisyuan/caesarCipher.java <del>import java.util.Scanner; <del> <del>public class caesarCipher { <del> <del> // cipher(String,int) returns original message shifted by 3 letters <del> // if input is numerical, adds 3 to number <del> public static char[] cipher(String message, int shift) { <del> <del> // declare variables <del> String s = message; <del> // convert string to array of chars <del> char[] letters = message.toCharArray(); <del> <del> for(int i = 0; i < message.length(); i++){ <del> letters[i] += 3; <del> } <del> return letters; <del> } <del> <del> // main method takes user input and calls cipher <del> public static void main(String[] args) { <del> <del> Scanner input = new Scanner(System.in); <del> System.out.println("Plaintext: "); <del> String message = input.nextLine(); <del> <del> System.out.println("Ciphertext:"); <del> System.out.println(cipher(message, 3)); <del> } <del> <del>}
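The deleted cipher above has two quirks worth noting: it ignores its shift parameter (always adding 3) and it walks past 'z' instead of wrapping around the alphabet. A corrected, self-contained variant for non-negative shifts is sketched below; it is an illustrative rewrite, not the original author's code.

// Corrected variant of the deleted caesarCipher above: honours the shift
// argument, wraps within the alphabet, and leaves non-letters untouched.
public class CaesarCipherSketch {

    static String cipher(String message, int shift) {
        StringBuilder out = new StringBuilder(message.length());
        for (char c : message.toCharArray()) {
            if (Character.isUpperCase(c)) {
                out.append((char) ('A' + (c - 'A' + shift) % 26));
            } else if (Character.isLowerCase(c)) {
                out.append((char) ('a' + (c - 'a' + shift) % 26));
            } else {
                out.append(c);
            }
        }
        return out.toString();
    }

    public static void main(String[] args) {
        System.out.println(cipher("xyz abc", 3)); // prints "abc def"
    }
}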
JavaScript
mit
375cbf49deb1cab25693012173b8ae573e5cb2c6
0
bdemirkir/socket.io,pandeysoni/socket.io,swrnvinod40/socket.io,morpheus-med/socket.io,ceosilvajr/socket.io,nicolasq92/socket.io,shreyasns/socket.io,ijidan/socket.io,pmq20/socket.io,manubb/socket.io,kolomiichenko/socket.io,darkshOw/socket.io,lattmann/socket.io,darrachequesne/socket.io,jmptrader/socket.io,samsonradu/socket.io,alfredayibonte/socket.io,maximesong/socket.io,dogmacreative/socket.io,wxkdesky/socket.io,jpallen/socket.io,cstryker/socket.io,developmentva/socket.io,odeke-em/socket.io,mailzwj/socket.io,RonakMehta25/socket.io,wuxianghou/socket.io,shapeshed/socket.io,ivkodenis92/socket.io,gsklee/socket.io,RabidFire/socket.io,cantlin/socket.io,jahanzaibbahadur/socket.io,nicolasq92/socket.io,socketio/socket.io,privman/socket.io,sonewman/socket.io,SimonHawesome/socket.io,pbeardshear/socket.io,crickeys/socket.io,devspacenine/socket.io,bratchenko/socket.io,travisperson/socket.io,nkzawa/socket.io,rubiii/socket.io,yey/socket.io,CJLamy/socket.io,Automattic/socket.io,luckyan315/socket.io,PierreMartin/socket.io,mvnnn/socket.io,jmadden85/chat,itplanes/socket.io,livechat/socket.io,odeke-em/socket.io,heavenlw/socket.io,elisee/socket.io,galileoguzman/socket.io,CristianChris/socket.io,amolv/socket.io,kissio/kiss.io,etoccalino/socket.io,glabcn/socket.io,tulpar008/socket.io,kapouer/socket.io,FGRibreau/socket.io,lookio/socket.io,lattmann/socket.io,bevacqua/socket.io,arxpoetica/socket.io,hello2009chen/socket.io,wpsmith/socket.io,trmchale1/fake2,get/socket.io,zjsxwc/socket.io,gravis/socket.io,theoreticaLee/socket.io,nulltask/socket.io,SpiderStrategies/socket.io,youprofit/socket.io,tejasmanohar/socket.io,jondubois/socket.io,mazekkkk/socket.io,HoopCHINA/socket.io,WahilaCreative/socket.io,kapouer/socket.io,jessewar/socket.io,DeadAlready/socket.io,sanvibyfish/socket.io,real-chocopanda/socket.io,qgd1987/socket.io,jbaumbach/socket.io,Klab-Berlin/socket.io,SLM-Linus/socket.io,higunjan/socket.io,jayeshdhakecha/socket.io,linalu1/socket.io,sahil87/socket.io,falihakz/socket.io,Integ/socket.io,delebash/socket.io,0x90sled/socket.io,g29times/socket.io,l3dlp/socket.io,glabcn/socket.io,pandemosth/socket.io,JstnRegi/socket.io,kakalong/socket.io,uken/socket.io,GICodeWarrior/socket.io,galileoguzman/socket.io,ceosilvajr/socket.io,litixsoft/socket.io,twhitbeck/socket.io,jainilpatel/socket.io,SimenB/socket.io,natefaubion/socket.io,chapuzzo/socket.io,mikedevita/socket.io,imjerrybao/socket.io,anhisme/socket.io,BionicClick/socket.io,matthewmueller/socket.io,brianconnoly/socket.io,PerformanceHorizonGroup/socket.io,judev/socket.io,skipness/socket.io,aaronjensen/socket.io,danpetrikin/socket.io,nightblood/socket.io,marcelaraujo/socket.io,mosoft521/socket.io,acqiang/socket.io,eetac/socket.io,lzyzsd/socket.io,josuevera/tc,Reregistered/socket.io,PierreMartin/socket.io,jmptrader/socket.io,noppoMan/socket.io,sirbrillig/socket.io,dakoo/socket.io,nick/socket.io,markovuksanovic/socket.io,grant/socket.io,lukw00/socket.io,mkuklis/socket.io,rohittiwarirvt/socket.io,anorthwolf/socket.io,balupton/socket.io,imshibaji/socket.io,fernandosc/socket.io,tim-speed/socket.io,ahmed-masud/socket.io,dakoo/socket.io,tsabolov/socket.io,socketio/socket.io,fmarais/socket.io,ifraixedes/socket.io,lopper/socket.io,procandi/socket.io,mohsennazari/socket,mazekkkk/socket.io,shripadk/socket.io,gavin-lai/socket.io,gdi2290/socket.io,ader/socket.io,chirilo/socket.io,xdenser/socket.io,manubb/socket.io,denghongcai/socket.io,Sharmapankaj3187/DropboxChatClient,fredrikbonander/socket.io,Manningham/socket.io,liorsion/socket.io,ThisIsPuzzle/socket.io,WE-SOUTHENERS/socket.io,adronf
an/socket.io,leeight/socket.io,VimVincent/socket.io,douglasvegas/socket.io,Tchu54/socket.io,mohsennazari/socket,dieface/socket.io,Automattic/socket.io,jmadden85/chat,fmarais/socket.io,pjump/socket.io,mosoft521/socket.io,hello2009chen/socket.io,kidozen/socket.io,goinstant/socket.io,linalu1/socket.io,muthhus/socket.io,amolv/socket.io,ereslibre/socket.io,baudehlo/socket.io,mikekidder/socket.io,engrabid411/socket.io,g29times/socket.io,DanCard0/ionic-chat-server,mogstad/socket.io,rmalecki/socket.io,Harsha03/socket.io,pjump/socket.io,alexlevy0/socket.io,CxRes/socket.io,CodeLoversAt/socket.io,ther12k/socket.io,CapeSepias/socket.io,enounca/socket.io,wanghongxing/socket.io,hlj2722/socket.io,Simonify/socket.io,amagee/socket.io,Nibbler999/socket.io,fernandosc/socket.io,l3dlp/socket.io,rase-/socket.io,delebash/socket.io,listepo/socket.io,yoinc/socket.io,jugglinmike/socket.io,kejyun/socket.io,SpainTrain/socket.io,DavidHHShao/socket.io,ijidan/socket.io,DDJarod/socket.io,chadyred/socket.io,virtual-world-framework/socket.io,Zacharyharu/Chat,lukascivil/socket.io,feugy/socket.io,xuzhijian17/socket.io,LoicMahieu/socket.io,youprofit/socket.io,sballesteros/socket.io,honger05/socket.io,xshwy/socket.io,121cast/socket.io,AnyPresence/socket.io,CodeKingdomsTeam/socket.io,guoyu07/socket.io,barseghyanartur/socket.io,skerit/socket.io,t-k/socket.io,mjgil/socket.io,XenYo91/socket.io,57-Wolve/socket.io,omaracrystal/Darkroom_PlayArea,wison/socket.io,ddude/socket.io,audreyt/socket.io,chirag04/socket.io,lntitbk/socket.io,RTHub/socket.io,bevacqua/socket.io,Aaron1011/socket.io,Jonekee/socket.io,xshwy/socket.io,CJLamy/socket.io,dbirchak/socket.io,lorenyu/socket.io,gavioto/socket.io,anorthwolf/socket.io,grin/socket.io,ddnbgroup/socket.io,r14r/fork_javascript_socket.io,zengzhining/socket.io,larskris/socket.io,yey/socket.io,sampsasaarela/socket.io,itadvisors/socket.io,juancancela/socket.io,tncdgn/socket.io,gshankar/socket.io,Sharmapankaj3187/DropboxChatClient,pandeysoni/socket.io,kwangkim/socket.io,skimcom/socket.io,kevin-roark/socket.io,olark/socket.io,FREEZX/socket.io,syuilo/socket.io,kevinsawicki/socket.io,sachintaware/socket.io,dkhunt27/socket.io,mcanthony/socket.io,qgd1987/socket.io,patilharshali/socket.io,bright-sparks/socket.io,darkshOw/socket.io,wison/socket.io,nearlyfreeapps/socket.io,saisai/socket.io,kidozen/socket.io,alfredayibonte/socket.io,sindicatoesp/socket.io,aashidham/gdoc,Nzaga/socket.io,DavidHHShao/socket.io,sonewman/socket.io,leeqiang/socket.io,audibene-em/socket.io,kwangkim/socket.io,ecarter/socket.io,xuzhijian17/socket.io,prabhatsubedi/socket.io,Anshdesire/socket.io,Tug/socket.io,deedw/socket.io,Integ/socket.io,falihakz/socket.io,ReikoR/socket.io,olivoil/socket.io,datjandra/socket.io,UsableHQ/socket.io,wuxianghou/socket.io,ridixcr/socket.io,thebigredgeek/socket.io,audibene-em/socket.io,higunjan/socket.io,r14r-work/fork_javascript_socket.io,agilethomas/socket.io,Dreae/socket.io,cynron/sio-lite,jlsuarezs/socket.io,aslijiasheng/socket.io,zjsxwc/socket.io,PUSEN/socket.io,bright-sparks/socket.io,procandi/socket.io,tjordanchat/socket.io
/*! * socket.io-node * Copyright(c) 2011 LearnBoost <[email protected]> * MIT Licensed */ /** * Module dependencies. */ var parser = require('./parser'); /** * Expose the constructor. */ exports = module.exports = Transport; /** * Transport constructor. * * @api public */ function Transport (mng, data, req) { this.manager = mng; this.id = data.id; this.disconnected = false; this.drained = true; this.buffer = []; this.handleRequest(req); }; /** * Access the logger. * * @api public */ Transport.prototype.__defineGetter__('log', function () { return this.manager.log; }); /** * Access the store. * * @api public */ Transport.prototype.__defineGetter__('store', function () { return this.manager.store; }); /** * Handles a request when it's set. * * @api private */ Transport.prototype.handleRequest = function (req) { this.log.debug('setting request', req.method, req.url); this.req = req; if (req.method == 'GET') { this.socket = req.socket; this.open = true; this.drained = true; this.setHeartbeatInterval(); this.setHandlers(); this.onSocketConnect(); } }; /** * Called when a connection is first set. * * @api private */ Transport.prototype.onSocketConnect = function () { }; /** * Sets transport handlers * * @api private */ Transport.prototype.setHandlers = function () { var self = this; // we need to do this in a pub/sub way since the client can POST the message // over a different socket (ie: different Transport instance) this.store.subscribe('heartbeat-clear:' + this.id, function () { self.onHeartbeatClear(); }); this.store.subscribe('disconnect-force:' + this.id, function () { self.onForcedDisconnect(); }); this.store.subscribe('dispatch:' + this.id, function (packet, volatile) { self.onDispatch(packet, volatile); }); this.bound = { end: this.onSocketEnd.bind(this) , close: this.onSocketClose.bind(this) , error: this.onSocketError.bind(this) , drain: this.onSocketDrain.bind(this) }; this.socket.on('end', this.bound.end); this.socket.on('close', this.bound.close); this.socket.on('error', this.bound.error); this.socket.on('drain', this.bound.drain); }; /** * Removes transport handlers * * @api private */ Transport.prototype.clearHandlers = function () { this.store.unsubscribe('disconnect-force:' + this.id); this.store.unsubscribe('heartbeat-clear:' + this.id); this.store.unsubscribe('dispatch:' + this.id); this.socket.removeListener('end', this.bound.end); this.socket.removeListener('close', this.bound.close); this.socket.removeListener('error', this.bound.error); this.socket.removeListener('drain', this.bound.drain); }; /** * Called when the connection dies * * @api private */ Transport.prototype.onSocketEnd = function () { this.end('socket end'); }; /** * Called when the connection dies * * @api private */ Transport.prototype.onSocketClose = function (error) { this.end(error ? 'socket error' : 'socket close'); }; /** * Called when the connection has an error. * * @api private */ Transport.prototype.onSocketError = function (err) { if (this.open) { this.socket.destroy(); this.onClose(); } this.log.info('socket error ' + err.stack); }; /** * Called when the connection is drained. * * @api private */ Transport.prototype.onSocketDrain = function () { this.drained = true; }; /** * Called upon receiving a heartbeat packet. * * @api private */ Transport.prototype.onHeartbeatClear = function () { this.clearHeartbeatTimeout(); this.setHeartbeatInterval(); }; /** * Called upon a forced disconnection. 
* * @api private */ Transport.prototype.onForcedDisconnect = function () { if (!this.disconnected) { this.log.info('transport end by forced client disconnection'); if (this.open) { this.packet({ type: 'disconnect' }); } this.end('booted'); } }; /** * Dispatches a packet. * * @api private */ Transport.prototype.onDispatch = function (packet, volatile) { if (volatile) { this.writeVolatile(packet); } else { this.write(packet); } }; /** * Sets the close timeout. */ Transport.prototype.setCloseTimeout = function () { if (!this.closeTimeout) { var self = this; this.closeTimeout = setTimeout(function () { self.log.debug('fired close timeout for client', self.id); self.closeTimeout = null; self.end('close timeout'); }, this.manager.get('close timeout') * 1000); this.log.debug('set close timeout for client', this.id); } }; /** * Clears the close timeout. */ Transport.prototype.clearCloseTimeout = function () { if (this.closeTimeout) { clearTimeout(this.closeTimeout); this.closeTimeout = null; this.log.debug('cleared close timeout for client', this.id); } }; /** * Sets the heartbeat timeout */ Transport.prototype.setHeartbeatTimeout = function () { if (!this.heartbeatTimeout) { var self = this; this.heartbeatTimeout = setTimeout(function () { self.log.debug('fired heartbeat timeout for client', self.id); self.heartbeatTimeout = null; self.end('heartbeat timeout'); }, this.manager.get('heartbeat timeout') * 1000); this.log.debug('set heartbeat timeout for client', this.id); } }; /** * Clears the heartbeat timeout * * @param text */ Transport.prototype.clearHeartbeatTimeout = function () { if (this.heartbeatTimeout) { clearTimeout(this.heartbeatTimeout); this.heartbeatTimeout = null; this.log.debug('cleared heartbeat timeout for client', this.id); } }; /** * Sets the heartbeat interval. To be called when a connection opens and when * a heartbeat is received. * * @api private */ Transport.prototype.setHeartbeatInterval = function () { if (!this.heartbeatInterval) { var self = this; this.heartbeatInterval = setTimeout(function () { self.heartbeat(); self.heartbeatInterval = null; }, this.manager.get('heartbeat interval') * 1000); this.log.debug('set heartbeat interval for client', this.id); } }; /** * Clears all timeouts. * * @api private */ Transport.prototype.clearTimeouts = function () { this.clearCloseTimeout(); this.clearHeartbeatTimeout(); this.clearHeartbeatInterval(); }; /** * Sends a heartbeat * * @api private */ Transport.prototype.heartbeat = function () { if (this.open) { this.log.debug('emitting heartbeat for client', this.id); this.packet({ type: 'heartbeat' }); this.setHeartbeatTimeout(); } return this; }; /** * Handles a message. 
* * @param {Object} packet object * @api private */ Transport.prototype.onMessage = function (packet) { if ('heartbeat' == packet.type) { this.log.debug('got heartbeat packet'); this.store.heartbeat(this.id); } else if ('disconnect' == packet.type && packet.endpoint == '') { this.log.debug('got disconnection packet'); this.store.disconnect(this.id, true); } else { this.log.debug('got packet'); if (packet.id && packet.ack != 'data') { this.log.debug('acknowledging packet automatically'); this.store.client(this.id).publish(parser.encodePacket({ type: 'ack' , ackId: packet.id , endpoint: packet.endpoint || '' })); } this.store.message(this.id, packet); } }; /** * Clears the heartbeat interval * * @api private */ Transport.prototype.clearHeartbeatInterval = function () { if (this.heartbeatInterval) { clearTimeout(this.heartbeatInterval); this.heartbeatInterval = null; this.log.debug('cleared heartbeat interval for client', this.id); } }; /** * Finishes the connection and makes sure client doesn't reopen * * @api private */ Transport.prototype.disconnect = function (reason) { this.packet({ type: 'disconnect' }); this.end(false, reason); return this; }; /** * Closes the connection. * * @api private */ Transport.prototype.close = function () { if (this.open) { this.doClose(); this.onClose(); } }; /** * Called upon a connection close. * * @api private */ Transport.prototype.onClose = function () { if (this.open) { this.setCloseTimeout(); this.unsubscribe(); this.open = false; } }; /** * Cleans up the connection, considers the client disconnected. * * @api private */ Transport.prototype.end = function (forced, reason) { if (!this.disconnected) { this.log.info('ending socket'); this.close(); this.clearTimeouts(); if (!forced) this.store.disconnect(this.id, false, reason); this.disconnected = true; } }; /** * Signals that the transport can start flushing buffers. * * @api public */ Transport.prototype.resume = function () { if (!this.disconnected) { this.paused = false; this.setHeartbeatInterval(); this.subscribe(); } return this; }; /** * Signals that the transport should pause and buffer data. * * @api public */ Transport.prototype.pause = function () { this.paused = true; return this; }; /** * Writes an error packet with the specified reason and advice. * * @param {Number} advice * @param {Number} reason * @api public */ Transport.prototype.error = function (reason, advice) { this.packet({ type: 'error' , reason: reason , advice: advice }); this.log.warn(reason, advice ? ('client should ' + advice) : ''); this.end(false, 'error'); }; /** * Write a packet. * * @api public */ Transport.prototype.packet = function (obj) { return this.write(parser.encodePacket(obj)); }; /** * Subscribe client. * * @api private */ Transport.prototype.subscribe = function () { if (!this.subscribed) { this.log.debug('subscribing', this.id); var self = this; // subscribe to buffered + normal messages this.store.client(this.id).consume(function (payload, packet) { if (payload) { self.payload(payload); } else { self.write(packet); } }); this.subscribed = true; } }; /** * Unsubscribe client. * * @api private */ Transport.prototype.unsubscribe = function () { this.log.info('unsubscribing', this.id); this.store.client(this.id).pause(); this.subscribed = false; }; /** * Writes a volatile message. 
* * @api private */ Transport.prototype.writeVolatile = function (msg) { if (this.open) { if (this.drained) { this.write(msg); } else { this.log.debug('ignoring volatile packet, buffer not drained'); } } else { this.log.debug('ignoring volatile packet, transport not open'); } };
lib/transport.js
/*! * socket.io-node * Copyright(c) 2011 LearnBoost <[email protected]> * MIT Licensed */ /** * Module dependencies. */ var parser = require('./parser'); /** * Expose the constructor. */ exports = module.exports = Transport; /** * Transport constructor. * * @api public */ function Transport (mng, data, req) { this.manager = mng; this.id = data.id; this.disconnected = false; this.drained = true; this.buffer = []; this.handleRequest(req); }; /** * Access the logger. * * @api public */ Transport.prototype.__defineGetter__('log', function () { return this.manager.log; }); /** * Access the store. * * @api public */ Transport.prototype.__defineGetter__('store', function () { return this.manager.store; }); /** * Handles a request when it's set. * * @api private */ Transport.prototype.handleRequest = function (req) { this.log.debug('setting request', req.method, req.url); this.req = req; if (req.method == 'GET') { this.socket = req.socket; this.open = true; this.drained = true; this.setHeartbeatInterval(); this.setHandlers(); this.onSocketConnect(); } }; /** * Called when a connection is first set. * * @api private */ Transport.prototype.onSocketConnect = function () { }; /** * Sets transport handlers * * @api private */ Transport.prototype.setHandlers = function () { var self = this; // we need to do this in a pub/sub way since the client can POST the message // over a different socket (ie: different Transport instance) this.store.subscribe('heartbeat-clear:' + this.id, function () { self.onHeartbeatClear(); }); this.store.subscribe('disconnect-force:' + this.id, function () { self.onForcedDisconnect(); }); this.store.subscribe('dispatch:' + this.id, function (packet, volatile) { self.onDispatch(packet, volatile); }); this.bound = { end: this.onSocketEnd.bind(this) , close: this.onSocketClose.bind(this) , error: this.onSocketError.bind(this) , drain: this.onSocketDrain.bind(this) }; this.socket.on('end', this.bound.end); this.socket.on('close', this.bound.close); this.socket.on('error', this.bound.error); this.socket.on('drain', this.bound.drain); }; /** * Removes transport handlers * * @api private */ Transport.prototype.clearHandlers = function () { this.store.unsubscribe('disconnect-force:' + this.id); this.store.unsubscribe('heartbeat-clear:' + this.id); this.store.unsubscribe('dispatch:' + this.id); this.socket.removeListener('end', this.bound.end); this.socket.removeListener('close', this.bound.close); this.socket.removeListener('error', this.bound.error); this.socket.removeListener('drain', this.bound.drain); }; /** * Called when the connection dies * * @api private */ Transport.prototype.onSocketEnd = function () { this.end('socket end'); }; /** * Called when the connection dies * * @api private */ Transport.prototype.onSocketClose = function (error) { this.end(error ? 'socket error' : 'socket close'); }; /** * Called when the connection has an error. * * @api private */ Transport.prototype.onSocketError = function (err) { if (this.open) { this.socket.destroy(); this.onClose(); } this.log.info('socket error ' + err.stack); }; /** * Called when the connection is drained. * * @api private */ Transport.prototype.onSocketDrain = function () { this.drained = true; }; /** * Called upon receiving a heartbeat packet. * * @api private */ Transport.prototype.onHeartbeatClear = function () { this.clearHeartbeatTimeout(); this.setHeartbeatInterval(); }; /** * Called upon a forced disconnection. 
* * @api private */ Transport.prototype.onForcedDisconnect = function () { if (!this.disconnected) { this.log.info('transport end by forced client disconnection'); if (this.open) { this.packet({ type: 'disconnect' }); } this.end('booted'); } }; /** * Dispatches a packet. * * @api private */ Transport.prototype.onDispatch = function (packet, volatile) { if (volatile) { this.writeVolatile(packet); } else { this.write(packet); } }; /** * Sets the close timeout. */ Transport.prototype.setCloseTimeout = function () { if (!this.closeTimeout) { var self = this; this.closeTimeout = setTimeout(function () { self.log.debug('fired close timeout for client', self.id); self.closeTimeout = null; self.end('close timeout'); }, this.manager.get('close timeout') * 1000); this.log.debug('set close timeout for client', this.id); } }; /** * Clears the close timeout. */ Transport.prototype.clearCloseTimeout = function () { if (this.closeTimeout) { clearTimeout(this.closeTimeout); this.closeTimeout = null; this.log.debug('cleared close timeout for client', this.id); } }; /** * Sets the heartbeat timeout */ Transport.prototype.setHeartbeatTimeout = function () { if (!this.heartbeatTimeout) { var self = this; this.heartbeatTimeout = setTimeout(function () { self.log.debug('fired heartbeat timeout for client', self.id); self.heartbeatTimeout = null; self.end('heartbeat timeout'); }, this.manager.get('heartbeat timeout') * 1000); this.log.debug('set heartbeat timeout for client', this.id); } }; /** * Clears the heartbeat timeout * * @param text */ Transport.prototype.clearHeartbeatTimeout = function () { if (this.heartbeatTimeout) { clearTimeout(this.heartbeatTimeout); this.heartbeatTimeout = null; this.log.debug('cleared heartbeat timeout for client', this.id); } }; /** * Sets the heartbeat interval. To be called when a connection opens and when * a heartbeat is received. * * @api private */ Transport.prototype.setHeartbeatInterval = function () { if (!this.heartbeatTimeout) { var self = this; this.heartbeatInterval = setTimeout(function () { self.heartbeat(); }, this.manager.get('heartbeat interval') * 1000); this.log.debug('set heartbeat interval for client', this.id); } }; /** * Clears all timeouts. * * @api private */ Transport.prototype.clearTimeouts = function () { this.clearCloseTimeout(); this.clearHeartbeatTimeout(); this.clearHeartbeatInterval(); }; /** * Sends a heartbeat * * @api private */ Transport.prototype.heartbeat = function () { if (this.open) { this.log.debug('emitting heartbeat for client', this.id); this.packet({ type: 'heartbeat' }); this.setHeartbeatTimeout(); } return this; }; /** * Handles a message. 
* * @param {Object} packet object * @api private */ Transport.prototype.onMessage = function (packet) { if ('heartbeat' == packet.type) { this.log.debug('got heartbeat packet'); this.store.heartbeat(this.id); } else if ('disconnect' == packet.type && packet.endpoint == '') { this.log.debug('got disconnection packet'); this.store.disconnect(this.id, true); } else { this.log.debug('got packet'); if (packet.id && packet.ack != 'data') { this.log.debug('acknowledging packet automatically'); this.store.client(this.id).publish(parser.encodePacket({ type: 'ack' , ackId: packet.id , endpoint: packet.endpoint || '' })); } this.store.message(this.id, packet); } }; /** * Clears the heartbeat interval * * @api private */ Transport.prototype.clearHeartbeatInterval = function () { if (this.heartbeatInterval) { clearTimeout(this.heartbeatInterval); this.heartbeatInterval = null; this.log.debug('cleared heartbeat interval for client', this.id); } }; /** * Finishes the connection and makes sure client doesn't reopen * * @api private */ Transport.prototype.disconnect = function (reason) { this.packet({ type: 'disconnect' }); this.end(false, reason); return this; }; /** * Closes the connection. * * @api private */ Transport.prototype.close = function () { if (this.open) { this.doClose(); this.onClose(); } }; /** * Called upon a connection close. * * @api private */ Transport.prototype.onClose = function () { if (this.open) { this.setCloseTimeout(); this.unsubscribe(); this.open = false; } }; /** * Cleans up the connection, considers the client disconnected. * * @api private */ Transport.prototype.end = function (forced, reason) { if (!this.disconnected) { this.log.info('ending socket'); this.close(); this.clearTimeouts(); if (!forced) this.store.disconnect(this.id, false, reason); this.disconnected = true; } }; /** * Signals that the transport can start flushing buffers. * * @api public */ Transport.prototype.resume = function () { if (!this.disconnected) { this.paused = false; this.setHeartbeatInterval(); this.subscribe(); } return this; }; /** * Signals that the transport should pause and buffer data. * * @api public */ Transport.prototype.pause = function () { this.paused = true; return this; }; /** * Writes an error packet with the specified reason and advice. * * @param {Number} advice * @param {Number} reason * @api public */ Transport.prototype.error = function (reason, advice) { this.packet({ type: 'error' , reason: reason , advice: advice }); this.log.warn(reason, advice ? ('client should ' + advice) : ''); this.end(false, 'error'); }; /** * Write a packet. * * @api public */ Transport.prototype.packet = function (obj) { return this.write(parser.encodePacket(obj)); }; /** * Subscribe client. * * @api private */ Transport.prototype.subscribe = function () { if (!this.subscribed) { this.log.debug('subscribing', this.id); var self = this; // subscribe to buffered + normal messages this.store.client(this.id).consume(function (payload, packet) { if (payload) { self.payload(payload); } else { self.write(packet); } }); this.subscribed = true; } }; /** * Unsubscribe client. * * @api private */ Transport.prototype.unsubscribe = function () { this.log.info('unsubscribing', this.id); this.store.client(this.id).pause(); this.subscribed = false; }; /** * Writes a volatile message. 
* * @api private */ Transport.prototype.writeVolatile = function (msg) { if (this.open) { if (this.drained) { this.write(msg); } else { this.log.debug('ignoring volatile packet, buffer not drained'); } } else { this.log.debug('ignoring volatile packet, transport not open'); } };
Important fix for heartbeats
lib/transport.js
Important fix for heartbeats
<ide><path>ib/transport.js <ide> */ <ide> <ide> Transport.prototype.setHeartbeatInterval = function () { <del> if (!this.heartbeatTimeout) { <add> if (!this.heartbeatInterval) { <ide> var self = this; <ide> <ide> this.heartbeatInterval = setTimeout(function () { <ide> self.heartbeat(); <add> self.heartbeatInterval = null; <ide> }, this.manager.get('heartbeat interval') * 1000); <ide> <ide> this.log.debug('set heartbeat interval for client', this.id);
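The guard in setHeartbeatInterval originally tested heartbeatTimeout rather than the heartbeatInterval handle it was about to set, and that handle was never cleared when the timer fired; the fix above checks and resets the correct handle so the interval can be re-armed cleanly. A Java sketch of the corrected pattern follows, with illustrative names rather than socket.io's API: guard on the handle you are about to set, and null it inside the callback.

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

// Re-armable one-shot timer: check the handle being set and clear it when the
// callback runs, mirroring the fix to setHeartbeatInterval above.
public class HeartbeatScheduler {
    private final ScheduledExecutorService executor =
            Executors.newSingleThreadScheduledExecutor();
    private ScheduledFuture<?> heartbeatInterval;   // null when no timer is pending

    synchronized void setHeartbeatInterval(long delayMillis) {
        if (heartbeatInterval == null) {            // guard the handle being set
            heartbeatInterval = executor.schedule(() -> {
                synchronized (this) {
                    heartbeatInterval = null;       // clear so it can be re-armed
                }
                System.out.println("emitting heartbeat");
            }, delayMillis, TimeUnit.MILLISECONDS);
        }
    }

    public static void main(String[] args) throws InterruptedException {
        HeartbeatScheduler scheduler = new HeartbeatScheduler();
        scheduler.setHeartbeatInterval(100);    // fires once
        Thread.sleep(200);
        scheduler.setHeartbeatInterval(100);    // can be armed again after the first fire
        Thread.sleep(200);
        scheduler.executor.shutdown();
    }
}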
Java
apache-2.0
7432c5255dccbe316b39e7ddfbe45c149b6aa31f
0
freiheit-com/wicket,klopfdreh/wicket,martin-g/wicket-osgi,mafulafunk/wicket,mosoft521/wicket,mosoft521/wicket,martin-g/wicket-osgi,Servoy/wicket,bitstorm/wicket,aldaris/wicket,astrapi69/wicket,topicusonderwijs/wicket,Servoy/wicket,dashorst/wicket,apache/wicket,dashorst/wicket,freiheit-com/wicket,astrapi69/wicket,bitstorm/wicket,bitstorm/wicket,AlienQueen/wicket,AlienQueen/wicket,freiheit-com/wicket,mosoft521/wicket,dashorst/wicket,mafulafunk/wicket,aldaris/wicket,astrapi69/wicket,zwsong/wicket,AlienQueen/wicket,bitstorm/wicket,topicusonderwijs/wicket,zwsong/wicket,freiheit-com/wicket,apache/wicket,mosoft521/wicket,AlienQueen/wicket,klopfdreh/wicket,klopfdreh/wicket,klopfdreh/wicket,topicusonderwijs/wicket,zwsong/wicket,mosoft521/wicket,topicusonderwijs/wicket,dashorst/wicket,selckin/wicket,selckin/wicket,AlienQueen/wicket,martin-g/wicket-osgi,klopfdreh/wicket,selckin/wicket,dashorst/wicket,aldaris/wicket,Servoy/wicket,Servoy/wicket,astrapi69/wicket,aldaris/wicket,freiheit-com/wicket,aldaris/wicket,apache/wicket,Servoy/wicket,selckin/wicket,mafulafunk/wicket,selckin/wicket,zwsong/wicket,topicusonderwijs/wicket,apache/wicket,bitstorm/wicket,apache/wicket
/* * $Id$ * $Revision$ * $Date$ * * ==================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package wicket.util.thread; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import wicket.util.time.Duration; import wicket.util.time.Time; /** * Runs a block of code periodically. The Task can be started at a given * time in the future and can be a daemon. The block of code will be passed * a Log object each time it is run through its ICode interface. * <p> * If the code block takes longer than the period to run, the next task * invocation will occur immediately. In this case, tasks will not occur * at precise multiples of the period. For example, if you run a task * every 30 seconds, and the first run takes 40 seconds but the second * takes 20 seconds, your task will be invoked at 0 seconds, 40 seconds * and 70 seconds (40 seconds + 30 seconds), which is not an even multiple * of 30 seconds. * <p> * In general, this is a simple task class designed for polling activities. * If you need precise guarantees, you probably should be using a different * task class. * * @author Jonathan Locke */ public final class Task { /** The name of this task. */ private final String name; /** The time that the task should start. */ private Time startTime = Time.now(); /** True if the task's thread should be a daemon. */ private boolean isDaemon = true; /** True if the tasks's thread has already started executing. */ private boolean isStarted = false; /** The log to give to the user's code. */ private Log log = null; /** * Constructor. * @param name The name of this task */ public Task(final String name) { this.name = name; } /** * Runs this task at the given frequency. You may only call this method * if the task has not yet been started. If the task is already running, * an IllegalStateException will be thrown. * @param frequency The frequency at which to run the code * @param code The code to run * @throws IllegalStateException Thrown if task is already running */ public synchronized final void run(final Duration frequency, final ICode code) { if (!isStarted) { final Runnable runnable = new Runnable() { public void run() { // Sleep until start time startTime.fromNow().sleep(); while (true) { // Get the start of the current period final Time startOfPeriod = Time.now(); try { // Run the user's code code.run(getLog()); } catch (Exception e) { getLog().error("Unhandled exception thrown by user code in task " + name, e); } // Sleep until the period is over (or not at all if it's // already passed) startOfPeriod.add(frequency).fromNow().sleep(); } } }; // Start the thread final Thread thread = new Thread(runnable, name + " Task"); thread.setDaemon(isDaemon); thread.start(); // We're started all right! isStarted = true; } else { throw new IllegalStateException ("Attempt to start task that has already been started"); } } /** * Gets the log. 
* @return the log */ protected Log getLog() { if(log == null) log = LogFactory.getLog(Task.class); return log; } /** * Sets start time for this task. You cannot set the start time for a task which * is already running. If you attempt to, an IllegalStateException will be thrown. * @param startTime The time this task should start running * @throws IllegalStateException Thrown if task is already running */ public synchronized void setStartTime(final Time startTime) { if (isStarted) { throw new IllegalStateException ("Attempt to set start time of task that has already been started"); } this.startTime = startTime; } /** * Set daemon or not. For obvious reasons, this value can only be set * before the task starts running. If you attempt to set this value * after the task starts running, an IllegalStateException will be thrown. * @param daemon True if this task's thread should be a daemon * @throws IllegalStateException Thrown if task is already running */ public synchronized void setDaemon(final boolean daemon) { if (isStarted) { throw new IllegalStateException ("Attempt to set daemon state of a task that has already been started"); } isDaemon = daemon; } /** * Set log for user code to log to when task runs. * @param log The log */ public synchronized void setLog(final Log log) { this.log = log; } /** * @see java.lang.Object#toString() */ public String toString() { return "[name=" + name + ", startTime=" + startTime + ", isDaemon=" + isDaemon + ", isStarted=" + isStarted + ", codeListener=" + log + "]"; } }
wicket/src/java/wicket/util/thread/Task.java
/* * $Id$ * $Revision$ * $Date$ * * ==================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package wicket.util.thread; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import wicket.util.time.Duration; import wicket.util.time.Time; /** * Runs a block of code periodically. The Task can be started at a given * time in the future and can be a daemon. The block of code will be passed * a Log object each time it is run through its ICode interface. * <p> * If the code block takes longer than the period to run, the next task * invocation will occur immediately. In this case, tasks will not occur * at precise multiples of the period. For example, if you run a task * every 30 seconds, and the first run takes 40 seconds but the second * takes 20 seconds, your task will be invoked at 0 seconds, 40 seconds * and 70 seconds (40 seconds + 30 seconds), which is not an even multiple * of 30 seconds. * <p> * In general, this is a simple task class designed for polling activities. * If you need precise guarantees, you probably should be using a different * task class. * * @author Jonathan Locke */ public final class Task { /** The name of this task. */ private final String name; /** The time that the task should start. */ private Time startTime = Time.now(); /** True if the task's thread should be a daemon. */ private boolean isDaemon = true; /** True if the tasks's thread has already started executing. */ private boolean isStarted = false; /** The log to give to the user's code. */ private Log log = null; /** * Constructor. * @param name The name of this task */ public Task(final String name) { this.name = name; } /** * Runs this task at the given frequency. You may only call this method * if the task has not yet been started. If the task is already running, * an IllegalStateException will be thrown. * @param frequency The frequency at which to run the code * @param code The code to run * @throws IllegalStateException Thrown if task is already running */ public synchronized final void run(final Duration frequency, final ICode code) { if (!isStarted) { final Runnable runnable = new Runnable() { public void run() { // Sleep until start time startTime.fromNow().sleep(); while (true) { // Get the start of the current period final Time startOfPeriod = Time.now(); try { // Run the user's code code.run(getLog()); } catch (Exception e) { getLog().error("Unhandled exception thrown by user code in task " + name, e); } // Sleep until the period is over (or not at all if it's // already passed) startOfPeriod.add(frequency).fromNow().sleep(); } } }; // Start the thread final Thread thread = new Thread(runnable, name + " Task"); thread.setDaemon(isDaemon); thread.start(); // We're started all right! 
isStarted = true; } else { throw new IllegalStateException ("Attempt to start task that has already been started"); } } /** * @return */ protected Log getLog() { if(log == null) log = LogFactory.getLog(Task.class); return log; } /** * Sets start time for this task. You cannot set the start time for a task which * is already running. If you attempt to, an IllegalStateException will be thrown. * @param startTime The time this task should start running * @throws IllegalStateException Thrown if task is already running */ public synchronized void setStartTime(final Time startTime) { if (isStarted) { throw new IllegalStateException ("Attempt to set start time of task that has already been started"); } this.startTime = startTime; } /** * Set daemon or not. For obvious reasons, this value can only be set * before the task starts running. If you attempt to set this value * after the task starts running, an IllegalStateException will be thrown. * @param daemon True if this task's thread should be a daemon * @throws IllegalStateException Thrown if task is already running */ public synchronized void setDaemon(final boolean daemon) { if (isStarted) { throw new IllegalStateException ("Attempt to set daemon state of a task that has already been started"); } isDaemon = daemon; } /** * Set log for user code to log to when task runs. * @param log The log */ public synchronized void setLog(final Log log) { this.log = log; } /** * @see java.lang.Object#toString() */ public String toString() { return "[name=" + name + ", startTime=" + startTime + ", isDaemon=" + isDaemon + ", isStarted=" + isStarted + ", codeListener=" + log + "]"; } }
*** empty log message *** git-svn-id: ac804e38dcddf5e42ac850d29d9218b7df6087b7@455590 13f79535-47bb-0310-9956-ffa450edef68
wicket/src/java/wicket/util/thread/Task.java
*** empty log message ***
<ide><path>icket/src/java/wicket/util/thread/Task.java <ide> } <ide> <ide> /** <del> * @return <add> * Gets the log. <add> * @return the log <ide> */ <ide> protected Log getLog() <ide> {
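For context on how the Task class above is meant to be driven, a short hypothetical caller follows. The Task methods are taken from the listing itself; the ICode import path and the Duration.seconds(...) factory are assumptions about the surrounding wicket APIs, not verified against them.

import org.apache.commons.logging.Log;

import wicket.util.thread.ICode;
import wicket.util.thread.Task;
import wicket.util.time.Duration;

// Hypothetical caller: poll something every five seconds on a daemon thread.
// ICode's package and Duration.seconds(...) are guessed from context.
public class PollingExample {
    public static void main(String[] args) throws InterruptedException {
        Task poller = new Task("resource-poller");
        poller.setDaemon(true);                  // must be set before run()

        poller.run(Duration.seconds(5), new ICode() {
            public void run(Log log) {
                log.info("polling...");          // user code receives the task's Log
            }
        });

        Thread.sleep(12_000);                    // keep the JVM alive so the daemon task can fire
    }
}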
Java
mit
9706fe49be141e40a5d84428b8d4be0823bd9cf8
0
opennars/opennars,opennars/opennars,opennars/opennars,opennars/opennars
package nars.gui.output; import automenta.vivisect.face.HumanoidFacePanel; import nars.core.NAR; public class NARFacePanel extends HumanoidFacePanel { private final NAR nar; public NARFacePanel(NAR n) { super(); this.nar = n; } @Override protected void onShowing(boolean showing) { super.onShowing(showing); // if (showing) { // nar.addOutput(this); // } // else { // nar.removeOutput(this); // } } @Override public void update(double t) { happy = nar.memory.emotion.happy() > 0.6; busy = nar.memory.emotion.busy() > 0.95; float conceptPriority = 0.5f; //((Number)nar.memory.logic.get("concept.priority.mean")).floatValue(); float taskNewPriority = 0.5f; //((Number)nar.memory.logic.get("task.new.priority.mean")).floatValue(); //max out at 0.5 conceptPriority = Math.min(conceptPriority, 0.4f); //if (nar.memory.getConcepts().isEmpty()) conceptPriority = 0; //if no concepts, start at zero, regardless of what mean might be valued face.setPupil(12f * (conceptPriority+0.35f)+2f, taskNewPriority*taskNewPriority*0.45f,0,0,0.9f); //pupils glow a little red for priority of new tasks face.setEyeball(8f * (conceptPriority + 0.35f)+12f,1f,1f,1f,0.85f); super.update(t); } // @Override // public void output(Class channel, Object signal) { // talk=1; // } }
nars_gui/nars/gui/output/NARFacePanel.java
package nars.gui.output; import automenta.vivisect.face.HumanoidFacePanel; import nars.core.NAR; public class NARFacePanel extends HumanoidFacePanel { private final NAR nar; public NARFacePanel(NAR n) { super(); this.nar = n; } @Override protected void onShowing(boolean showing) { super.onShowing(showing); // if (showing) { // nar.addOutput(this); // } // else { // nar.removeOutput(this); // } } @Override public void update(double t) { happy = nar.memory.emotion.happy() > 0.6; busy = nar.memory.emotion.busy() > 0.95; float conceptPriority = ((Number)nar.memory.logic.get("concept.priority.mean")).floatValue(); float taskNewPriority = ((Number)nar.memory.logic.get("task.new.priority.mean")).floatValue(); //max out at 0.5 conceptPriority = Math.min(conceptPriority, 0.4f); //if (nar.memory.getConcepts().isEmpty()) conceptPriority = 0; //if no concepts, start at zero, regardless of what mean might be valued face.setPupil(12f * (conceptPriority+0.35f)+2f, taskNewPriority*taskNewPriority*0.45f,0,0,0.9f); //pupils glow a little red for priority of new tasks face.setEyeball(8f * (conceptPriority + 0.35f)+12f,1f,1f,1f,0.85f); super.update(t); } // @Override // public void output(Class channel, Object signal) { // talk=1; // } }
fix NAR face panel for now.
nars_gui/nars/gui/output/NARFacePanel.java
fix NAR face panel for now.
<ide><path>ars_gui/nars/gui/output/NARFacePanel.java <ide> public void update(double t) { <ide> happy = nar.memory.emotion.happy() > 0.6; <ide> busy = nar.memory.emotion.busy() > 0.95; <del> float conceptPriority = ((Number)nar.memory.logic.get("concept.priority.mean")).floatValue(); <del> float taskNewPriority = ((Number)nar.memory.logic.get("task.new.priority.mean")).floatValue(); <add> float conceptPriority = 0.5f; //((Number)nar.memory.logic.get("concept.priority.mean")).floatValue(); <add> float taskNewPriority = 0.5f; //((Number)nar.memory.logic.get("task.new.priority.mean")).floatValue(); <ide> <ide> //max out at 0.5 <ide> conceptPriority = Math.min(conceptPriority, 0.4f);
Java
apache-2.0
8a035b87e5f3c2558f9ca8fbc3f6b58d65b6266d
0
millmanorama/autopsy,rcordovano/autopsy,rcordovano/autopsy,esaunders/autopsy,esaunders/autopsy,millmanorama/autopsy,rcordovano/autopsy,esaunders/autopsy,wschaeferB/autopsy,wschaeferB/autopsy,esaunders/autopsy,wschaeferB/autopsy,rcordovano/autopsy,wschaeferB/autopsy,rcordovano/autopsy,millmanorama/autopsy,wschaeferB/autopsy,esaunders/autopsy,millmanorama/autopsy,rcordovano/autopsy
/* * Autopsy Forensic Browser * * Copyright 2013-2018 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.modules.embeddedfileextractor; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Level; import net.sf.sevenzipjbinding.ArchiveFormat; import static net.sf.sevenzipjbinding.ArchiveFormat.RAR; import net.sf.sevenzipjbinding.ISequentialOutStream; import net.sf.sevenzipjbinding.ISevenZipInArchive; import net.sf.sevenzipjbinding.SevenZip; import net.sf.sevenzipjbinding.SevenZipException; import net.sf.sevenzipjbinding.SevenZipNativeInitializationException; import net.sf.sevenzipjbinding.simple.ISimpleInArchive; import net.sf.sevenzipjbinding.simple.ISimpleInArchiveItem; import net.sf.sevenzipjbinding.ExtractOperationResult; import org.netbeans.api.progress.ProgressHandle; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.services.Blackboard; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.FileUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMonitor; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleContentEvent; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DerivedFile; import org.sleuthkit.datamodel.EncodedFileOutputStream; import org.sleuthkit.datamodel.ReadContentInputStream; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; class SevenZipExtractor { private static final Logger logger = Logger.getLogger(SevenZipExtractor.class.getName()); private IngestServices services = IngestServices.getInstance(); private final IngestJobContext context; private final FileTypeDetector fileTypeDetector; //encryption type strings private static final String ENCRYPTION_FILE_LEVEL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel"); private static final String ENCRYPTION_FULL = 
NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull"); //zip bomb detection private static final int MAX_DEPTH = 4; private static final int MAX_COMPRESSION_RATIO = 600; private static final long MIN_COMPRESSION_RATIO_SIZE = 500 * 1000000L; private static final long MIN_FREE_DISK_SPACE = 1 * 1000 * 1000000L; //1GB private String moduleDirRelative; private String moduleDirAbsolute; private Blackboard blackboard; private String getLocalRootAbsPath(String uniqueArchiveFileName) { return moduleDirAbsolute + File.separator + uniqueArchiveFileName; } /** * Enum of mimetypes which support archive extraction */ private enum SupportedArchiveExtractionFormats { ZIP("application/zip"), //NON-NLS SEVENZ("application/x-7z-compressed"), //NON-NLS GZIP("application/gzip"), //NON-NLS XGZIP("application/x-gzip"), //NON-NLS XBZIP2("application/x-bzip2"), //NON-NLS XTAR("application/x-tar"), //NON-NLS XGTAR("application/x-gtar"), XRAR("application/x-rar-compressed"); //NON-NLS private final String mimeType; SupportedArchiveExtractionFormats(final String mimeType) { this.mimeType = mimeType; } @Override public String toString() { return this.mimeType; } // TODO Expand to support more formats after upgrading Tika } SevenZipExtractor(IngestJobContext context, FileTypeDetector fileTypeDetector, String moduleDirRelative, String moduleDirAbsolute) throws SevenZipNativeInitializationException { if (!SevenZip.isInitializedSuccessfully() && (SevenZip.getLastInitializationException() == null)) { SevenZip.initSevenZipFromPlatformJAR(); } this.context = context; this.fileTypeDetector = fileTypeDetector; this.moduleDirRelative = moduleDirRelative; this.moduleDirAbsolute = moduleDirAbsolute; } /** * Checks whether extraction is supported for a file, based on MIME type. * * @param file The file. * * @return This method returns true if the file format is currently * supported. Else it returns false. */ boolean isSevenZipExtractionSupported(AbstractFile file) { String fileMimeType = fileTypeDetector.getMIMEType(file); for (SupportedArchiveExtractionFormats mimeType : SupportedArchiveExtractionFormats.values()) { if (mimeType.toString().equals(fileMimeType)) { return true; } } return false; } /** * Check if the item inside archive is a potential zipbomb * * Currently checks compression ratio. 
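* For example, with MAX_COMPRESSION_RATIO = 600 and MIN_COMPRESSION_RATIO_SIZE = 500 MB, a 600 MB item whose packed size is 1 MB (ratio 600) is flagged, while items smaller than 500 MB are never checked.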
* * More heuristics to be added here * * @param archiveName the parent archive * @param archiveFileItem the archive item * * @return true if potential zip bomb, false otherwise */ private boolean isZipBombArchiveItemCheck(AbstractFile archiveFile, ISimpleInArchiveItem archiveFileItem) { try { final Long archiveItemSize = archiveFileItem.getSize(); //skip the check for small files if (archiveItemSize == null || archiveItemSize < MIN_COMPRESSION_RATIO_SIZE) { return false; } final Long archiveItemPackedSize = archiveFileItem.getPackedSize(); if (archiveItemPackedSize == null || archiveItemPackedSize <= 0) { logger.log(Level.WARNING, "Cannot getting compression ratio, cannot detect if zipbomb: {0}, item: {1}", new Object[]{archiveFile.getName(), archiveFileItem.getPath()}); //NON-NLS return false; } int cRatio = (int) (archiveItemSize / archiveItemPackedSize); if (cRatio >= MAX_COMPRESSION_RATIO) { String itemName = archiveFileItem.getPath(); logger.log(Level.INFO, "Possible zip bomb detected, compression ration: {0} for in archive item: {1}", new Object[]{cRatio, itemName}); //NON-NLS String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), itemName); String path; try { path = archiveFile.getUniquePath(); } catch (TskCoreException ex) { path = archiveFile.getParentPath() + archiveFile.getName(); } String details = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails", cRatio, path); //MessageNotifyUtil.Notify.error(msg, details); services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); return true; } else { return false; } } catch (SevenZipException ex) { logger.log(Level.WARNING, "Error getting archive item size and cannot detect if zipbomb. ", ex); //NON-NLS return false; } } /** * Check file extension and return appropriate input options for * SevenZip.openInArchive() * * @param archiveFile file to check file extension * * @return input parameter for SevenZip.openInArchive() */ private ArchiveFormat get7ZipOptions(AbstractFile archiveFile) { // try to get the file type from the BB String detectedFormat; detectedFormat = archiveFile.getMIMEType(); if (detectedFormat == null) { logger.log(Level.WARNING, "Could not detect format for file: {0}", archiveFile); //NON-NLS // if we don't have attribute info then use file extension String extension = archiveFile.getNameExtension(); if ("rar".equals(extension)) //NON-NLS { // for RAR files we need to open them explicitly as RAR. Otherwise, if there is a ZIP archive inside RAR archive // it will be opened incorrectly when using 7zip's built-in auto-detect functionality return RAR; } // Otherwise open the archive using 7zip's built-in auto-detect functionality return null; } else if (detectedFormat.contains("application/x-rar-compressed")) //NON-NLS { // for RAR files we need to open them explicitly as RAR. 
Otherwise, if there is a ZIP archive inside RAR archive // it will be opened incorrectly when using 7zip's built-in auto-detect functionality return RAR; } // Otherwise open the archive using 7zip's built-in auto-detect functionality return null; } /** * Get the data source object id of the root data source for the specified * archive * * @param file the archive which the root data source id is being found * * @return the data source object id of the root data source * * @throws TskCoreException */ private long getRootArchiveId(AbstractFile file) throws TskCoreException { long id = file.getId(); Content parentContent = file.getParent(); while (parentContent != null) { id = parentContent.getId(); parentContent = parentContent.getParent(); } return id; } /** * Query the database and get the list of files which exist for this archive * which have already been added to the case database. * * @param archiveFile the archiveFile to get the files associated with * @param archiveFilePath the archive file path that must be contained in * the parent_path of files * * @return the list of files which already exist in the case database for * this archive * * @throws TskCoreException * @throws NoCurrentCaseException */ private List<AbstractFile> getAlreadyExtractedFiles(AbstractFile archiveFile, String archiveFilePath) throws TskCoreException, NoCurrentCaseException { //check if already has derived files, skip //check if local unpacked dir exists if (archiveFile.hasChildren() && new File(moduleDirAbsolute, EmbeddedFileExtractorIngestModule.getUniqueName(archiveFile)).exists()) { return Case.getCurrentCaseThrows().getServices().getFileManager().findFilesByParentPath(getRootArchiveId(archiveFile), archiveFilePath); } return new ArrayList<>(); } /** * Get the archiveFilePath * * @param archiveFile the archiveFile to get the path for * * @return the archiveFilePath to be used by the unpack method */ private String getArchiveFilePath(AbstractFile archiveFile) { try { return archiveFile.getUniquePath(); } catch (TskCoreException ex) { return archiveFile.getParentPath() + archiveFile.getName(); } } /** * Create the local directories if they do not exist for the archive * * @param uniqueArchiveFileName the unique name which corresponds to the * archive file in this datasource */ private void makeLocalDirectories(String uniqueArchiveFileName) { final String localRootAbsPath = getLocalRootAbsPath(uniqueArchiveFileName); final File localRoot = new File(localRootAbsPath); if (!localRoot.exists()) { localRoot.mkdirs(); } } /** * Get the path in the archive of the specified item * * @param item - the item to get the path for * @param itemNumber - the item number to help provide uniqueness to the * path * @param archiveFile - the archive file the item exists in * * @return a string representing the path to the item in the archive * * @throws SevenZipException */ private String getPathInArchive(ISimpleInArchiveItem item, int itemNumber, AbstractFile archiveFile) throws SevenZipException { String pathInArchive = item.getPath(); if (pathInArchive == null || pathInArchive.isEmpty()) { //some formats (.tar.gz) may not be handled correctly -- file in archive has no name/path //handle this for .tar.gz and tgz but assuming the child is tar, //otherwise, unpack using itemNumber as name //TODO this should really be signature based, not extension based String archName = archiveFile.getName(); int dotI = archName.lastIndexOf("."); String useName = null; if (dotI != -1) { String base = archName.substring(0, dotI); String ext = 
archName.substring(dotI); int colonIndex = ext.lastIndexOf(":"); if (colonIndex != -1) { // If alternate data stream is found, fix the name // so Windows doesn't choke on the colon character. ext = ext.substring(0, colonIndex); } switch (ext) { case ".gz": //NON-NLS useName = base; break; case ".tgz": //NON-NLS useName = base + ".tar"; //NON-NLS break; case ".bz2": //NON-NLS useName = base; break; } } if (useName == null) { pathInArchive = "/" + archName + "/" + Integer.toString(itemNumber); } else { pathInArchive = "/" + useName; } String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.unknownPath.msg", getArchiveFilePath(archiveFile), pathInArchive); logger.log(Level.WARNING, msg); } return pathInArchive; } /* * Get the String that will represent the key for the hashmap which keeps * track of existing files from an AbstractFile */ private String getKeyAbstractFile(AbstractFile fileInDatabase) { return fileInDatabase == null ? null : fileInDatabase.getParentPath() + fileInDatabase.getName(); } /* * Get the String that will represent the key for the hashmap which keeps * track of existing files from an unpacked node and the archiveFilePath */ private String getKeyFromUnpackedNode(UnpackedTree.UnpackedNode node, String archiveFilePath) { return node == null ? null : archiveFilePath + "/" + node.getFileName(); } /** * Unpack an archive item to the disk using a password if specified. * * @param item - the archive item to unpack * @param unpackedNode - the unpackedNode to add derivedInfo to * @param password - the password for the archive, null if not * used * @param freeDiskSpace - the amount of free disk space * @param uniqueExtractedName - the name of the file to extract the item to * * @return unpackedNode - the updated unpackedNode * * @throws SevenZipException */ private SevenZipExtractor.UnpackedTree.UnpackedNode unpackNode(ISimpleInArchiveItem item, SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode, String password, long freeDiskSpace, String uniqueExtractedName) throws SevenZipException { //unpack locally if a file final String localAbsPath = moduleDirAbsolute + File.separator + uniqueExtractedName; final String localRelPath = moduleDirRelative + File.separator + uniqueExtractedName; final Date createTime = item.getCreationTime(); final Date accessTime = item.getLastAccessTime(); final Date writeTime = item.getLastWriteTime(); final long createtime = createTime == null ? 0L : createTime.getTime() / 1000; final long modtime = writeTime == null ? 0L : writeTime.getTime() / 1000; final long accesstime = accessTime == null ? 0L : accessTime.getTime() / 1000; SevenZipExtractor.UnpackStream unpackStream = null; boolean isDir = item.isFolder(); if (!isDir) { try { // NOTE: item.getSize() may return null in case of certain // archiving formats. 
Eg: BZ2 if (item.getSize() != null) { unpackStream = new SevenZipExtractor.KnownSizeUnpackStream(localAbsPath, item.getSize()); } else { unpackStream = new SevenZipExtractor.UnknownSizeUnpackStream(localAbsPath, freeDiskSpace); } ExtractOperationResult result; if (password == null) { result = item.extractSlow(unpackStream); } else { result = item.extractSlow(unpackStream, password); } if (result != ExtractOperationResult.OK) { logger.log(Level.WARNING, "Extraction of : {0} encountered error {1}", new Object[]{localAbsPath, result}); //NON-NLS return null; } } catch (SevenZipException e) { //could be something unexpected with this file, move on logger.log(Level.WARNING, "Could not extract file from archive: " + localAbsPath, e); //NON-NLS } finally { if (unpackStream != null) { //record derived data in unode, to be traversed later after unpacking the archive unpackedNode.addDerivedInfo(unpackStream.getSize(), !isDir, 0L, createtime, accesstime, modtime, localRelPath); unpackStream.close(); } } } else { // this is a directory, size is always 0 unpackedNode.addDerivedInfo(0, !isDir, 0L, createtime, accesstime, modtime, localRelPath); } return unpackedNode; } /** * Unpack the file to local folder and return a list of derived files * * @param archiveFile file to unpack * @param depthMap - a concurrent hashmap which keeps track of the depth * of all nested archives * * @return true if unpacking is complete */ void unpack(AbstractFile archiveFile, ConcurrentHashMap<Long, Archive> depthMap) { unpack(archiveFile, depthMap, null); } /** * Unpack the file to local folder and return a list of derived files, use * the password if specified. * * @param archiveFile - file to unpack * @param depthMap - a concurrent hashmap which keeps track of the depth * of all nested archives * @param password - the password to use, null for no password * * @return true if unpacking is complete */ @Messages({"SevenZipExtractor.indexError.message=Failed to index encryption detected artifact for keyword search."}) boolean unpack(AbstractFile archiveFile, ConcurrentHashMap<Long, Archive> depthMap, String password) { boolean unpackSuccessful = true; //initialized to true change to false if any files fail to extract and boolean hasEncrypted = false; boolean fullEncryption = true; boolean progressStarted = false; int processedItems = 0; final String archiveFilePath = getArchiveFilePath(archiveFile); final String escapedArchiveFilePath = FileUtil.escapeFileName(archiveFilePath); HashMap<String, ZipFileStatusWrapper> statusMap = new HashMap<>(); List<AbstractFile> unpackedFiles = Collections.<AbstractFile>emptyList(); ISevenZipInArchive inArchive = null; SevenZipContentReadStream stream = null; final ProgressHandle progress = ProgressHandle.createHandle(Bundle.EmbeddedFileExtractorIngestModule_ArchiveExtractor_moduleName()); //recursion depth check for zip bomb final long archiveId = archiveFile.getId(); Archive parentAr; try { blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard(); } catch (NoCurrentCaseException ex) { logger.log(Level.INFO, "Exception while getting open case.", ex); //NON-NLS unpackSuccessful = false; return unpackSuccessful; } try { List<AbstractFile> existingFiles = getAlreadyExtractedFiles(archiveFile, archiveFilePath); for (AbstractFile file : existingFiles) { statusMap.put(getKeyAbstractFile(file), new ZipFileStatusWrapper(file, ZipFileStatus.EXISTS)); } } catch (TskCoreException e) { logger.log(Level.INFO, "Error checking if file already has been processed, skipping: {0}", 
escapedArchiveFilePath); //NON-NLS unpackSuccessful = false; return unpackSuccessful; } catch (NoCurrentCaseException ex) { logger.log(Level.INFO, "No open case was found while trying to unpack the archive file {0}", escapedArchiveFilePath); //NON-NLS unpackSuccessful = false; return unpackSuccessful; } parentAr = depthMap.get(archiveId); if (parentAr == null) { parentAr = new Archive(archiveId, 0); depthMap.put(archiveId, parentAr); } else if (parentAr.getDepth() == MAX_DEPTH) { String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.warnMsg.zipBomb", archiveFile.getName()); String details = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.warnDetails.zipBomb", parentAr.getDepth(), escapedArchiveFilePath); //MessageNotifyUtil.Notify.error(msg, details); services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); unpackSuccessful = false; return unpackSuccessful; } try { stream = new SevenZipContentReadStream(new ReadContentInputStream(archiveFile)); // for RAR files we need to open them explicitly as RAR. Otherwise, if there is a ZIP archive inside RAR archive // it will be opened incorrectly when using 7zip's built-in auto-detect functionality. // All other archive formats are still opened using 7zip built-in auto-detect functionality. ArchiveFormat options = get7ZipOptions(archiveFile); if (password == null) { inArchive = SevenZip.openInArchive(options, stream); } else { inArchive = SevenZip.openInArchive(options, stream, password); } int numItems = inArchive.getNumberOfItems(); logger.log(Level.INFO, "Count of items in archive: {0}: {1}", new Object[]{escapedArchiveFilePath, numItems}); //NON-NLS progress.start(numItems); progressStarted = true; final ISimpleInArchive simpleInArchive = inArchive.getSimpleInterface(); //setup the archive local root folder final String uniqueArchiveFileName = FileUtil.escapeFileName(EmbeddedFileExtractorIngestModule.getUniqueName(archiveFile)); try { makeLocalDirectories(uniqueArchiveFileName); } catch (SecurityException e) { logger.log(Level.SEVERE, "Error setting up output path for archive root: {0}", getLocalRootAbsPath(uniqueArchiveFileName)); //NON-NLS //bail unpackSuccessful = false; return unpackSuccessful; } //initialize tree hierarchy to keep track of unpacked file structure SevenZipExtractor.UnpackedTree unpackedTree = new SevenZipExtractor.UnpackedTree(moduleDirRelative + "/" + uniqueArchiveFileName, archiveFile); long freeDiskSpace; try { freeDiskSpace = services.getFreeDiskSpace(); } catch (NullPointerException ex) { //If ingest has not been run at least once getFreeDiskSpace() will throw a null pointer exception //currently getFreeDiskSpace always returns DISK_FREE_SPACE_UNKNOWN freeDiskSpace = IngestMonitor.DISK_FREE_SPACE_UNKNOWN; } //unpack and process every item in archive int itemNumber = 0; for (ISimpleInArchiveItem item : simpleInArchive.getArchiveItems()) { String pathInArchive = getPathInArchive(item, itemNumber, archiveFile); //query for path in db ++itemNumber; //check if possible zip bomb if (isZipBombArchiveItemCheck(archiveFile, item)) { unpackSuccessful = false; return unpackSuccessful; } SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode = unpackedTree.addNode(pathInArchive); //update progress bar progress.progress(archiveFile.getName() + ": " + item.getPath(), processedItems); final boolean isEncrypted = item.isEncrypted(); if (isEncrypted 
&& password == null) { logger.log(Level.WARNING, "Skipping encrypted file in archive: {0}", pathInArchive); //NON-NLS hasEncrypted = true; unpackSuccessful = false; continue; } else { fullEncryption = false; } // NOTE: item.getSize() may return null in case of certain // archiving formats. Eg: BZ2 //check if unpacking this file will result in out of disk space //this is additional to zip bomb prevention mechanism if (freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN && item.getSize() != null && item.getSize() > 0) { //if free space is known and file is not empty. long newDiskSpace = freeDiskSpace - item.getSize(); if (newDiskSpace < MIN_FREE_DISK_SPACE) { String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.msg", escapedArchiveFilePath, item.getPath()); String details = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.details"); //MessageNotifyUtil.Notify.error(msg, details); services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); logger.log(Level.INFO, "Skipping archive item due to insufficient disk space: {0}, {1}", new String[]{escapedArchiveFilePath, item.getPath()}); //NON-NLS logger.log(Level.INFO, "Available disk space: {0}", new Object[]{freeDiskSpace}); //NON-NLS unpackSuccessful = false; continue; //skip this file } else { //update est. disk space during this archive, so we don't need to poll for every file extracted freeDiskSpace = newDiskSpace; } } final String uniqueExtractedName = FileUtil.escapeFileName(uniqueArchiveFileName + File.separator + (item.getItemIndex() / 1000) + File.separator + item.getItemIndex() + "_" + new File(pathInArchive).getName()); //create local dirs and empty files before extracted File localFile = new java.io.File(moduleDirAbsolute + File.separator + uniqueExtractedName); //cannot rely on files in top-bottom order if (!localFile.exists()) { try { if (item.isFolder()) { localFile.mkdirs(); } else { localFile.getParentFile().mkdirs(); try { localFile.createNewFile(); } catch (IOException e) { logger.log(Level.SEVERE, "Error creating extracted file: " + localFile.getAbsolutePath(), e); //NON-NLS } } } catch (SecurityException e) { logger.log(Level.SEVERE, "Error setting up output path for unpacked file: {0}", pathInArchive); //NON-NLS //TODO consider bail out / msg to the user } } // skip the rest of this loop if we couldn't create the file if (localFile.exists() == false) { continue; } //find this node in the hierarchy, create if neede; unpackedNode = unpackNode(item, unpackedNode, password, freeDiskSpace, uniqueExtractedName); if (unpackedNode == null) { unpackSuccessful = false; } //update units for progress bar ++processedItems; } // add them to the DB. 
We wait until the end so that we have the metadata on all of the // intermediate nodes since the order is not guaranteed try { unpackedTree.updateOrAddFileToCaseRec(statusMap, archiveFilePath); unpackedFiles = unpackedTree.getAllFileObjects(); //check if children are archives, update archive depth tracking for (AbstractFile unpackedFile : unpackedFiles) { if (unpackedFile == null) { continue; } if (isSevenZipExtractionSupported(unpackedFile)) { Archive child = new Archive(unpackedFile.getId(), parentAr.getDepth() + 1); parentAr.addChild(child); depthMap.put(unpackedFile.getId(), child); } } } catch (TskCoreException | NoCurrentCaseException e) { logger.log(Level.SEVERE, "Error populating complete derived file hierarchy from the unpacked dir structure", e); //NON-NLS //TODO decide if anything to cleanup, for now bailing } } catch (SevenZipException ex) { logger.log(Level.WARNING, "Error unpacking file: " + archiveFile, ex); //NON-NLS //inbox message // print a message if the file is allocated if (archiveFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC)) { String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.msg", archiveFile.getName()); String details = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.details", escapedArchiveFilePath, ex.getMessage()); services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); } } finally { if (inArchive != null) { try { inArchive.close(); } catch (SevenZipException e) { logger.log(Level.SEVERE, "Error closing archive: " + archiveFile, e); //NON-NLS } } if (stream != null) { try { stream.close(); } catch (IOException ex) { logger.log(Level.SEVERE, "Error closing stream after unpacking archive: " + archiveFile, ex); //NON-NLS } } //close progress bar if (progressStarted) { progress.finish(); } } //create artifact and send user message if (hasEncrypted) { String encryptionType = fullEncryption ? 
ENCRYPTION_FULL : ENCRYPTION_FILE_LEVEL; try { BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED); artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, EmbeddedFileExtractorModuleFactory.getModuleName(), encryptionType)); try { // index the artifact for keyword search blackboard.indexArtifact(artifact); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS MessageNotifyUtil.Notify.error( Bundle.SevenZipExtractor_indexError_message(), artifact.getDisplayName()); } services.fireModuleDataEvent(new ModuleDataEvent(EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + escapedArchiveFilePath, ex); //NON-NLS } String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.msg"); String details = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.details", archiveFile.getName(), EmbeddedFileExtractorModuleFactory.getModuleName()); services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); } // adding unpacked extracted derived files to the job after closing relevant resources. if (!unpackedFiles.isEmpty()) { //currently sending a single event for all new files services.fireModuleContentEvent(new ModuleContentEvent(archiveFile)); if (context != null) { context.addFilesToJob(unpackedFiles); } } return unpackSuccessful; } /** * Stream used to unpack the archive to local file */ private abstract static class UnpackStream implements ISequentialOutStream { private OutputStream output; private String localAbsPath; UnpackStream(String localAbsPath) { this.localAbsPath = localAbsPath; try { output = new EncodedFileOutputStream(new FileOutputStream(localAbsPath), TskData.EncodingType.XOR1); } catch (IOException ex) { logger.log(Level.SEVERE, "Error writing extracted file: " + localAbsPath, ex); //NON-NLS } } public abstract long getSize(); OutputStream getOutput() { return output; } String getLocalAbsPath() { return localAbsPath; } public void close() { if (output != null) { try { output.flush(); output.close(); } catch (IOException e) { logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", localAbsPath); //NON-NLS } } } } /** * Stream used to unpack the archive of unknown size to local file */ private static class UnknownSizeUnpackStream extends UnpackStream { private long freeDiskSpace; private boolean outOfSpace = false; private long bytesWritten = 0; UnknownSizeUnpackStream(String localAbsPath, long freeDiskSpace) { super(localAbsPath); this.freeDiskSpace = freeDiskSpace; } @Override public long getSize() { return this.bytesWritten; } @Override public int write(byte[] bytes) throws SevenZipException { try { // If the content size is unknown, cautiously write to disk. // Write only if byte array is less than 80% of the current // free disk space. if (freeDiskSpace == IngestMonitor.DISK_FREE_SPACE_UNKNOWN || bytes.length < 0.8 * freeDiskSpace) { getOutput().write(bytes); // NOTE: this method is called multiple times for a // single extractSlow() call. 
Update bytesWritten and // freeDiskSpace after every write operation. this.bytesWritten += bytes.length; this.freeDiskSpace -= bytes.length; } else { this.outOfSpace = true; logger.log(Level.INFO, NbBundle.getMessage( SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg")); throw new SevenZipException( NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg")); } } catch (IOException ex) { throw new SevenZipException( NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg", getLocalAbsPath()), ex); } return bytes.length; } @Override public void close() { if (getOutput() != null) { try { getOutput().flush(); getOutput().close(); if (this.outOfSpace) { Files.delete(Paths.get(getLocalAbsPath())); } } catch (IOException e) { logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", getLocalAbsPath()); //NON-NLS } } } } /** * Stream used to unpack the archive of known size to local file */ private static class KnownSizeUnpackStream extends UnpackStream { private long size; KnownSizeUnpackStream(String localAbsPath, long size) { super(localAbsPath); this.size = size; } @Override public long getSize() { return this.size; } @Override public int write(byte[] bytes) throws SevenZipException { try { getOutput().write(bytes); } catch (IOException ex) { throw new SevenZipException( NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg", getLocalAbsPath()), ex); } return bytes.length; } } /** * Representation of the files in the archive. Used to track of local tree * file hierarchy, archive depth, and files created to easily and reliably * get parent AbstractFile for unpacked file. So that we don't have to * depend on type of traversal of unpacked files handed to us by 7zip * unpacker. */ private class UnpackedTree { final UnpackedNode rootNode; /** * * @param localPathRoot Path in module output folder that files will be * saved to * @param archiveFile Archive file being extracted * @param fileManager */ UnpackedTree(String localPathRoot, AbstractFile archiveFile) { this.rootNode = new UnpackedNode(); this.rootNode.setFile(archiveFile); this.rootNode.setFileName(archiveFile.getName()); this.rootNode.setLocalRelPath(localPathRoot); } /** * Creates a node in the tree at the given path. Makes intermediate * nodes if needed. If a node already exists at that path, it is * returned. 
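* For example, addNode("dir1/dir2/file.txt") creates (or reuses) nodes for "dir1" and "dir2" and returns the node for "file.txt"; both "/" and "\" are accepted as separators and empty path tokens are skipped.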
* * @param filePath file path with 1 or more tokens separated by / * * @return child node for the last file token in the filePath */ UnpackedNode addNode(String filePath) { String[] toks = filePath.split("[\\/\\\\]"); List<String> tokens = new ArrayList<>(); for (int i = 0; i < toks.length; ++i) { if (!toks[i].isEmpty()) { tokens.add(toks[i]); } } return addNode(rootNode, tokens); } /** * recursive method that traverses the path * * @param parent * @param tokenPath * * @return */ private UnpackedNode addNode(UnpackedNode parent, List<String> tokenPath) { // we found all of the tokens if (tokenPath.isEmpty()) { return parent; } // get the next name in the path and look it up String childName = tokenPath.remove(0); UnpackedNode child = parent.getChild(childName); // create new node if (child == null) { child = new UnpackedNode(childName, parent); parent.addChild(child); } // go down one more level return addNode(child, tokenPath); } /** * Get the root file objects (after createDerivedFiles() ) of this tree, * so that they can be rescheduled. * * @return root objects of this unpacked tree */ List<AbstractFile> getRootFileObjects() { List<AbstractFile> ret = new ArrayList<>(); for (UnpackedNode child : rootNode.getChildren()) { ret.add(child.getFile()); } return ret; } /** * Get the all file objects (after createDerivedFiles() ) of this tree, * so that they can be rescheduled. * * @return all file objects of this unpacked tree */ List<AbstractFile> getAllFileObjects() { List<AbstractFile> ret = new ArrayList<>(); for (UnpackedNode child : rootNode.getChildren()) { getAllFileObjectsRec(ret, child); } return ret; } private void getAllFileObjectsRec(List<AbstractFile> list, UnpackedNode parent) { list.add(parent.getFile()); for (UnpackedNode child : parent.getChildren()) { getAllFileObjectsRec(list, child); } } /** * Traverse the tree top-down after unzipping is done and create derived * files for the entire hierarchy */ void updateOrAddFileToCaseRec(HashMap<String, ZipFileStatusWrapper> statusMap, String archiveFilePath) throws TskCoreException, NoCurrentCaseException { final FileManager fileManager = Case.getCurrentCaseThrows().getServices().getFileManager(); for (UnpackedNode child : rootNode.getChildren()) { updateOrAddFileToCaseRec(child, fileManager, statusMap, archiveFilePath); } } /** * Add derived files to the case if they do not exist, update the * derived file data if the new file contains more information than the * existing one, and do nothing if the existing information is complete. * * @param node - the UnpackedNode for the file which is being * added or updated * @param fileManager - the file manager to perform the adding or * updating * @param statusMap - the map of existing files and their status * @param archiveFilePath - the archive file path for the unpacked node * * @throws TskCoreException */ private void updateOrAddFileToCaseRec(UnpackedNode node, FileManager fileManager, HashMap<String, ZipFileStatusWrapper> statusMap, String archiveFilePath) throws TskCoreException { DerivedFile df; try { String nameInDatabase = getKeyFromUnpackedNode(node, archiveFilePath); ZipFileStatusWrapper existingFile = nameInDatabase == null ? 
null : statusMap.get(nameInDatabase); if (existingFile == null) { df = fileManager.addDerivedFile(node.getFileName(), node.getLocalRelPath(), node.getSize(), node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(), node.isIsFile(), node.getParent().getFile(), "", EmbeddedFileExtractorModuleFactory.getModuleName(), "", "", TskData.EncodingType.XOR1); statusMap.put(getKeyAbstractFile(df), new ZipFileStatusWrapper(df, ZipFileStatus.EXISTS)); } else { String key = getKeyAbstractFile(existingFile.getFile()); if (existingFile.getStatus() == ZipFileStatus.EXISTS && existingFile.getFile().getSize() < node.getSize()) { existingFile.setStatus(ZipFileStatus.UPDATE); statusMap.put(key, existingFile); } if (existingFile.getStatus() == ZipFileStatus.UPDATE) { //if the we are updating a file and its mime type was octet-stream we want to re-type it String mimeType = existingFile.getFile().getMIMEType().equalsIgnoreCase("application/octet-stream") ? null : existingFile.getFile().getMIMEType(); df = fileManager.updateDerivedFile((DerivedFile) existingFile.getFile(), node.getLocalRelPath(), node.getSize(), node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(), node.isIsFile(), mimeType, "", EmbeddedFileExtractorModuleFactory.getModuleName(), "", "", TskData.EncodingType.XOR1); } else { //ALREADY CURRENT - SKIP statusMap.put(key, new ZipFileStatusWrapper(existingFile.getFile(), ZipFileStatus.SKIP)); df = (DerivedFile) existingFile.getFile(); } } node.setFile(df); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error adding a derived file to db:" + node.getFileName(), ex); //NON-NLS throw new TskCoreException( NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackedTree.exception.msg", node.getFileName()), ex); } //recurse adding the children if this file was incomplete the children presumably need to be added for (UnpackedNode child : node.getChildren()) { updateOrAddFileToCaseRec(child, fileManager, statusMap, getKeyFromUnpackedNode(node, archiveFilePath)); } } /** * A node in the unpacked tree that represents a file or folder. */ private class UnpackedNode { private String fileName; private AbstractFile file; private final List<UnpackedNode> children = new ArrayList<>(); private String localRelPath = ""; private long size; private long ctime, crtime, atime, mtime; private boolean isFile; private UnpackedNode parent; //root constructor UnpackedNode() { } //child node constructor UnpackedNode(String fileName, UnpackedNode parent) { this.fileName = fileName; this.parent = parent; this.localRelPath = parent.getLocalRelPath() + File.separator + fileName; } long getCtime() { return ctime; } long getCrtime() { return crtime; } long getAtime() { return atime; } long getMtime() { return mtime; } void setFileName(String fileName) { this.fileName = fileName; } /** * Add a child to the list of child nodes associated with this node. * * @param child - the node which is a child node of this node */ void addChild(UnpackedNode child) { children.add(child); } /** * Get this nodes list of child UnpackedNode * * @return children - the UnpackedNodes which are children of this * node. */ List<UnpackedNode> getChildren() { return children; } /** * Gets the parent node of this node. 
* * @return - the parent UnpackedNode */ UnpackedNode getParent() { return parent; } void addDerivedInfo(long size, boolean isFile, long ctime, long crtime, long atime, long mtime, String relLocalPath) { this.size = size; this.isFile = isFile; this.ctime = ctime; this.crtime = crtime; this.atime = atime; this.mtime = mtime; this.localRelPath = relLocalPath; } void setFile(AbstractFile file) { this.file = file; } /** * get child by name or null if it doesn't exist * * @param childFileName * * @return */ UnpackedNode getChild(String childFileName) { UnpackedNode ret = null; for (UnpackedNode child : children) { if (child.getFileName().equals(childFileName)) { ret = child; break; } } return ret; } String getFileName() { return fileName; } AbstractFile getFile() { return file; } String getLocalRelPath() { return localRelPath; } /** * Set the local relative path associated with this UnpackedNode * * @param localRelativePath - the local relative path to be * associated with this node. */ void setLocalRelPath(String localRelativePath) { localRelPath = localRelativePath; } long getSize() { return size; } boolean isIsFile() { return isFile; } } } static class Archive { private final int depth; private final long objectId; private final List<Archive> children; Archive(long objectId, int depth) { this.objectId = objectId; this.children = new ArrayList<>(); this.depth = depth; } /** * Add a child to the list of child archives associated with this * archive. * * @param child - the archive which is a child archive of this archive */ void addChild(Archive child) { children.add(child); } /** * Get the object id of this archive. * * @return objectId - the unique objectId of this archive */ long getObjectId() { return objectId; } /** * Get archive depth of this archive * * @return depth - an integer representing how many layers of archives * this archive is inside. */ int getDepth() { return depth; } } /** * A class which wraps an AbstractFile and an enum identifing whether the * file which exists in the case database is current */ private final class ZipFileStatusWrapper { private final AbstractFile abstractFile; private ZipFileStatus zipStatus; /** * Construct a ZipFileStatusWrapper to wrap the given AbstractFile and * status * * @param file - The AbstractFile which exists in the case database * @param status - an indicator of if the file information is current */ private ZipFileStatusWrapper(AbstractFile file, ZipFileStatus status) { abstractFile = file; zipStatus = status; } /** * Get the AbstractFile contained in this object * * @return abstractFile - The abstractFile this object wraps */ private AbstractFile getFile() { return abstractFile; } /** * Get whether the file should be skipped or updated * * @return zipStatus - an Enum value indicating if the file is current */ private ZipFileStatus getStatus() { return zipStatus; } /** * Set the zipStatus of the file being wrapped when it changes * * @param status - an Enum value indicating if the file is current */ private void setStatus(ZipFileStatus status) { zipStatus = status; } } /** * The status of the file from the archive in regards to whether it should * be updated */ private enum ZipFileStatus { UPDATE, //Should be updated //NON-NLS SKIP, //File is current can be skipped //NON-NLS EXISTS //File exists but it is unknown if it is current //NON-NLS } }
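The zip-bomb protection above has two parts: the per-item compression-ratio check and a cap on how deeply archives nested inside other archives are unpacked (MAX_DEPTH). Below is a minimal, self-contained sketch of the depth-cap idea only; it is not part of the Autopsy sources, and NestedArchiveDepthDemo, SimpleArchive and the object ids are hypothetical stand-ins for the Archive class and the AbstractFile object ids that SevenZipExtractor tracks in its depthMap.

import java.util.concurrent.ConcurrentHashMap;

public class NestedArchiveDepthDemo {

    private static final int MAX_DEPTH = 4; // same limit as SevenZipExtractor

    /** Hypothetical stand-in for SevenZipExtractor.Archive. */
    private static final class SimpleArchive {
        final long objectId;
        final int depth;

        SimpleArchive(long objectId, int depth) {
            this.objectId = objectId;
            this.depth = depth;
        }
    }

    public static void main(String[] args) {
        ConcurrentHashMap<Long, SimpleArchive> depthMap = new ConcurrentHashMap<>();

        // Register the top-level archive at depth 0, as unpack() does for a new archive id.
        long parentId = 100L;
        depthMap.put(parentId, new SimpleArchive(parentId, 0));

        // Simulate an archive nested inside an archive, nested inside an archive, ...
        for (long childId = 101L; childId <= 106L; childId++) {
            SimpleArchive parent = depthMap.get(parentId);
            if (parent.depth == MAX_DEPTH) {
                // At this point SevenZipExtractor posts a "possible zip bomb" warning and bails out.
                System.out.println("Refusing to unpack object " + parent.objectId + " at depth " + parent.depth);
                break;
            }
            // Each nested archive found while unpacking is registered one level deeper than its parent.
            depthMap.put(childId, new SimpleArchive(childId, parent.depth + 1));
            System.out.println("Unpacked object " + childId + " at depth " + (parent.depth + 1));
            parentId = childId;
        }
    }
}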
Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
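For archive items whose uncompressed size is unknown (for example some BZ2 entries), the extractor writes cautiously instead of trusting a size header: each buffer is written only if it is smaller than 80% of the free disk space it still believes is available, the estimate is updated after every write, and a partially written file is deleted when space runs out. A minimal sketch of that policy, assuming a hypothetical freeDiskSpace figure and an in-memory output in place of the real EncodedFileOutputStream and IngestMonitor value:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class CautiousWriteDemo {

    public static void main(String[] args) throws IOException {
        long freeDiskSpace = 10_000L; // hypothetical free space, in bytes
        long bytesWritten = 0L;
        ByteArrayOutputStream out = new ByteArrayOutputStream(); // stands in for the extracted file

        byte[] buffer = new byte[4_000]; // one chunk handed to write() during extractSlow()
        for (int chunk = 0; chunk < 5; chunk++) {
            // Write only while the chunk stays below 80% of the space believed to remain.
            if (buffer.length < 0.8 * freeDiskSpace) {
                out.write(buffer);
                bytesWritten += buffer.length;
                freeDiskSpace -= buffer.length; // refresh the estimate after every write
            } else {
                // SevenZipExtractor throws a SevenZipException here and later deletes the partial file.
                System.out.println("Out of space after " + bytesWritten + " bytes; aborting extraction.");
                return;
            }
        }
        System.out.println("Wrote " + bytesWritten + " bytes.");
    }
}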
/* * Autopsy Forensic Browser * * Copyright 2013-2018 Basis Technology Corp. * Contact: carrier <at> sleuthkit <dot> org * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.sleuthkit.autopsy.modules.embeddedfileextractor; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Level; import net.sf.sevenzipjbinding.ArchiveFormat; import static net.sf.sevenzipjbinding.ArchiveFormat.RAR; import net.sf.sevenzipjbinding.ISequentialOutStream; import net.sf.sevenzipjbinding.ISevenZipInArchive; import net.sf.sevenzipjbinding.SevenZip; import net.sf.sevenzipjbinding.SevenZipException; import net.sf.sevenzipjbinding.SevenZipNativeInitializationException; import net.sf.sevenzipjbinding.simple.ISimpleInArchive; import net.sf.sevenzipjbinding.simple.ISimpleInArchiveItem; import net.sf.sevenzipjbinding.ExtractOperationResult; import org.netbeans.api.progress.ProgressHandle; import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.services.Blackboard; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.FileUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMonitor; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleContentEvent; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.DerivedFile; import org.sleuthkit.datamodel.EncodedFileOutputStream; import org.sleuthkit.datamodel.ReadContentInputStream; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; class SevenZipExtractor { private static final Logger logger = Logger.getLogger(SevenZipExtractor.class.getName()); private IngestServices services = IngestServices.getInstance(); private final IngestJobContext context; private final FileTypeDetector fileTypeDetector; //encryption type strings private static final String ENCRYPTION_FILE_LEVEL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel"); private static final String ENCRYPTION_FULL = 
NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull"); //zip bomb detection private static final int MAX_DEPTH = 4; private static final int MAX_COMPRESSION_RATIO = 600; private static final long MIN_COMPRESSION_RATIO_SIZE = 500 * 1000000L; private static final long MIN_FREE_DISK_SPACE = 1 * 1000 * 1000000L; //1GB private String moduleDirRelative; private String moduleDirAbsolute; private Blackboard blackboard; private String getLocalRootAbsPath(String uniqueArchiveFileName) { return moduleDirAbsolute + File.separator + uniqueArchiveFileName; } /** * Enum of mimetypes which support archive extraction */ private enum SupportedArchiveExtractionFormats { ZIP("application/zip"), //NON-NLS SEVENZ("application/x-7z-compressed"), //NON-NLS GZIP("application/gzip"), //NON-NLS XGZIP("application/x-gzip"), //NON-NLS XBZIP2("application/x-bzip2"), //NON-NLS XTAR("application/x-tar"), //NON-NLS XGTAR("application/x-gtar"), XRAR("application/x-rar-compressed"); //NON-NLS private final String mimeType; SupportedArchiveExtractionFormats(final String mimeType) { this.mimeType = mimeType; } @Override public String toString() { return this.mimeType; } // TODO Expand to support more formats after upgrading Tika } SevenZipExtractor(IngestJobContext context, FileTypeDetector fileTypeDetector, String moduleDirRelative, String moduleDirAbsolute) throws SevenZipNativeInitializationException { if (!SevenZip.isInitializedSuccessfully() && (SevenZip.getLastInitializationException() == null)) { SevenZip.initSevenZipFromPlatformJAR(); } this.context = context; this.fileTypeDetector = fileTypeDetector; this.moduleDirRelative = moduleDirRelative; this.moduleDirAbsolute = moduleDirAbsolute; } /** * Checks whether extraction is supported for a file, based on MIME type. * * @param file The file. * * @return This method returns true if the file format is currently * supported. Else it returns false. */ boolean isSevenZipExtractionSupported(AbstractFile file) { String fileMimeType = fileTypeDetector.getMIMEType(file); for (SupportedArchiveExtractionFormats mimeType : SupportedArchiveExtractionFormats.values()) { if (mimeType.toString().equals(fileMimeType)) { return true; } } return false; } /** * Check if the item inside archive is a potential zipbomb * * Currently checks compression ratio. 
* * More heuristics to be added here * * @param archiveName the parent archive * @param archiveFileItem the archive item * * @return true if potential zip bomb, false otherwise */ private boolean isZipBombArchiveItemCheck(AbstractFile archiveFile, ISimpleInArchiveItem archiveFileItem) { try { final Long archiveItemSize = archiveFileItem.getSize(); //skip the check for small files if (archiveItemSize == null || archiveItemSize < MIN_COMPRESSION_RATIO_SIZE) { return false; } final Long archiveItemPackedSize = archiveFileItem.getPackedSize(); if (archiveItemPackedSize == null || archiveItemPackedSize <= 0) { logger.log(Level.WARNING, "Cannot getting compression ratio, cannot detect if zipbomb: {0}, item: {1}", new Object[]{archiveFile.getName(), archiveFileItem.getPath()}); //NON-NLS return false; } int cRatio = (int) (archiveItemSize / archiveItemPackedSize); if (cRatio >= MAX_COMPRESSION_RATIO) { String itemName = archiveFileItem.getPath(); logger.log(Level.INFO, "Possible zip bomb detected, compression ration: {0} for in archive item: {1}", new Object[]{cRatio, itemName}); //NON-NLS String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), itemName); String path; try { path = archiveFile.getUniquePath(); } catch (TskCoreException ex) { path = archiveFile.getParentPath() + archiveFile.getName(); } String details = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnDetails", cRatio, path); //MessageNotifyUtil.Notify.error(msg, details); services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); return true; } else { return false; } } catch (SevenZipException ex) { logger.log(Level.WARNING, "Error getting archive item size and cannot detect if zipbomb. ", ex); //NON-NLS return false; } } /** * Check file extension and return appropriate input options for * SevenZip.openInArchive() * * @param archiveFile file to check file extension * * @return input parameter for SevenZip.openInArchive() */ private ArchiveFormat get7ZipOptions(AbstractFile archiveFile) { // try to get the file type from the BB String detectedFormat; detectedFormat = archiveFile.getMIMEType(); if (detectedFormat == null) { logger.log(Level.WARNING, "Could not detect format for file: {0}", archiveFile); //NON-NLS // if we don't have attribute info then use file extension String extension = archiveFile.getNameExtension(); if ("rar".equals(extension)) //NON-NLS { // for RAR files we need to open them explicitly as RAR. Otherwise, if there is a ZIP archive inside RAR archive // it will be opened incorrectly when using 7zip's built-in auto-detect functionality return RAR; } // Otherwise open the archive using 7zip's built-in auto-detect functionality return null; } else if (detectedFormat.contains("application/x-rar-compressed")) //NON-NLS { // for RAR files we need to open them explicitly as RAR. 
Otherwise, if there is a ZIP archive inside RAR archive // it will be opened incorrectly when using 7zip's built-in auto-detect functionality return RAR; } // Otherwise open the archive using 7zip's built-in auto-detect functionality return null; } /** * Get the data source object id of the root data source for the specified * archive * * @param file the archive which the root data source id is being found * * @return the data source object id of the root data source * * @throws TskCoreException */ private long getRootArchiveId(AbstractFile file) throws TskCoreException { long id = file.getId(); Content parentContent = file.getParent(); while (parentContent != null) { id = parentContent.getId(); parentContent = parentContent.getParent(); } return id; } /** * Query the database and get the list of files which exist for this archive * which have already been added to the case database. * * @param archiveFile the archiveFile to get the files associated with * @param archiveFilePath the archive file path that must be contained in * the parent_path of files * * @return the list of files which already exist in the case database for * this archive * * @throws TskCoreException * @throws NoCurrentCaseException */ private List<AbstractFile> getAlreadyExtractedFiles(AbstractFile archiveFile, String archiveFilePath) throws TskCoreException, NoCurrentCaseException { //check if already has derived files, skip //check if local unpacked dir exists if (archiveFile.hasChildren() && new File(moduleDirAbsolute, EmbeddedFileExtractorIngestModule.getUniqueName(archiveFile)).exists()) { return Case.getCurrentCaseThrows().getServices().getFileManager().findFilesByParentPath(getRootArchiveId(archiveFile), archiveFilePath); } return new ArrayList<>(); } /** * Get the archiveFilePath * * @param archiveFile the archiveFile to get the path for * * @return the archiveFilePath to be used by the unpack method */ private String getArchiveFilePath(AbstractFile archiveFile) { try { return archiveFile.getUniquePath(); } catch (TskCoreException ex) { return archiveFile.getParentPath() + archiveFile.getName(); } } /** * Create the local directories if they do not exist for the archive * * @param uniqueArchiveFileName the unique name which corresponds to the * archive file in this datasource */ private void makeLocalDirectories(String uniqueArchiveFileName) { final String localRootAbsPath = getLocalRootAbsPath(uniqueArchiveFileName); final File localRoot = new File(localRootAbsPath); if (!localRoot.exists()) { localRoot.mkdirs(); } } /** * Get the path in the archive of the specified item * * @param item - the item to get the path for * @param itemNumber - the item number to help provide uniqueness to the * path * @param archiveFile - the archive file the item exists in * * @return a string representing the path to the item in the archive * * @throws SevenZipException */ private String getPathInArchive(ISimpleInArchiveItem item, int itemNumber, AbstractFile archiveFile) throws SevenZipException { String pathInArchive = item.getPath(); if (pathInArchive == null || pathInArchive.isEmpty()) { //some formats (.tar.gz) may not be handled correctly -- file in archive has no name/path //handle this for .tar.gz and tgz but assuming the child is tar, //otherwise, unpack using itemNumber as name //TODO this should really be signature based, not extension based String archName = archiveFile.getName(); int dotI = archName.lastIndexOf("."); String useName = null; if (dotI != -1) { String base = archName.substring(0, dotI); String ext = 
archName.substring(dotI); int colonIndex = ext.lastIndexOf(":"); if (colonIndex != -1) { // If alternate data stream is found, fix the name // so Windows doesn't choke on the colon character. ext = ext.substring(0, colonIndex); } switch (ext) { case ".gz": //NON-NLS useName = base; break; case ".tgz": //NON-NLS useName = base + ".tar"; //NON-NLS break; case ".bz2": //NON-NLS useName = base; break; } } if (useName == null) { pathInArchive = "/" + archName + "/" + Integer.toString(itemNumber); } else { pathInArchive = "/" + useName; } String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.unknownPath.msg", getArchiveFilePath(archiveFile), pathInArchive); logger.log(Level.WARNING, msg); } return pathInArchive; } /* * Get the String that will represent the key for the hashmap which keeps * track of existing files from an AbstractFile */ private String getKeyAbstractFile(AbstractFile fileInDatabase) { return fileInDatabase == null ? null : fileInDatabase.getParentPath() + fileInDatabase.getName(); } /* * Get the String that will represent the key for the hashmap which keeps * track of existing files from an unpacked node and the archiveFilePath */ private String getKeyFromUnpackedNode(UnpackedTree.UnpackedNode node, String archiveFilePath) { return node == null ? null : archiveFilePath + "/" + node.getFileName(); } /** * Unpack an archive item to the disk using a password if specified. * * @param item - the archive item to unpack * @param unpackedNode - the unpackedNode to add derivedInfo to * @param password - the password for the archive, null if not * used * @param freeDiskSpace - the amount of free disk space * @param uniqueExtractedName - the name of the file to extract the item to * * @return unpackedNode - the updated unpackedNode * * @throws SevenZipException */ private SevenZipExtractor.UnpackedTree.UnpackedNode unpackNode(ISimpleInArchiveItem item, SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode, String password, long freeDiskSpace, String uniqueExtractedName) throws SevenZipException { //unpack locally if a file final String localAbsPath = moduleDirAbsolute + File.separator + uniqueExtractedName; final String localRelPath = moduleDirRelative + File.separator + uniqueExtractedName; final Date createTime = item.getCreationTime(); final Date accessTime = item.getLastAccessTime(); final Date writeTime = item.getLastWriteTime(); final long createtime = createTime == null ? 0L : createTime.getTime() / 1000; final long modtime = writeTime == null ? 0L : writeTime.getTime() / 1000; final long accesstime = accessTime == null ? 0L : accessTime.getTime() / 1000; SevenZipExtractor.UnpackStream unpackStream = null; boolean isDir = item.isFolder(); if (!isDir) { try { // NOTE: item.getSize() may return null in case of certain // archiving formats. 
Eg: BZ2 if (item.getSize() != null) { unpackStream = new SevenZipExtractor.KnownSizeUnpackStream(localAbsPath, item.getSize()); } else { unpackStream = new SevenZipExtractor.UnknownSizeUnpackStream(localAbsPath, freeDiskSpace); } ExtractOperationResult result; if (password == null) { result = item.extractSlow(unpackStream); } else { result = item.extractSlow(unpackStream, password); } if (result != ExtractOperationResult.OK) { logger.log(Level.WARNING, "Extraction of : {0} encountered error {1}", new Object[]{localAbsPath, result}); //NON-NLS return null; } } catch (SevenZipException e) { //could be something unexpected with this file, move on logger.log(Level.WARNING, "Could not extract file from archive: " + localAbsPath, e); //NON-NLS } finally { if (unpackStream != null) { //record derived data in unode, to be traversed later after unpacking the archive unpackedNode.addDerivedInfo(unpackStream.getSize(), !isDir, 0L, createtime, accesstime, modtime, localRelPath); unpackStream.close(); } } } else { // this is a directory, size is always 0 unpackedNode.addDerivedInfo(0, !isDir, 0L, createtime, accesstime, modtime, localRelPath); } return unpackedNode; } /** * Unpack the file to local folder and return a list of derived files * * @param archiveFile file to unpack * @param depthMap - a concurrent hashmap which keeps track of the depth * of all nested archives * * @return true if unpacking is complete */ void unpack(AbstractFile archiveFile, ConcurrentHashMap<Long, Archive> depthMap) { unpack(archiveFile, depthMap, null); } /** * Unpack the file to local folder and return a list of derived files, use * the password if specified. * * @param archiveFile - file to unpack * @param depthMap - a concurrent hashmap which keeps track of the depth * of all nested archives * @param password - the password to use, null for no password * * @return true if unpacking is complete */ @Messages({"SevenZipExtractor.indexError.message=Failed to index encryption detected artifact for keyword search."}) boolean unpack(AbstractFile archiveFile, ConcurrentHashMap<Long, Archive> depthMap, String password) { boolean unpackSuccessful = true; //initialized to true change to false if any files fail to extract and boolean hasEncrypted = false; boolean fullEncryption = true; boolean progressStarted = false; int processedItems = 0; final String archiveFilePath = getArchiveFilePath(archiveFile); final String escapedArchiveFilePath = FileUtil.escapeFileName(archiveFilePath); HashMap<String, ZipFileStatusWrapper> statusMap = new HashMap<>(); List<AbstractFile> unpackedFiles = Collections.<AbstractFile>emptyList(); ISevenZipInArchive inArchive = null; SevenZipContentReadStream stream = null; final ProgressHandle progress = ProgressHandle.createHandle(Bundle.EmbeddedFileExtractorIngestModule_ArchiveExtractor_moduleName()); //recursion depth check for zip bomb final long archiveId = archiveFile.getId(); Archive parentAr; try { blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard(); } catch (NoCurrentCaseException ex) { logger.log(Level.INFO, "Exception while getting open case.", ex); //NON-NLS unpackSuccessful = false; return unpackSuccessful; } try { List<AbstractFile> existingFiles = getAlreadyExtractedFiles(archiveFile, archiveFilePath); for (AbstractFile file : existingFiles) { statusMap.put(getKeyAbstractFile(file), new ZipFileStatusWrapper(file, ZipFileStatus.EXISTS)); } } catch (TskCoreException e) { logger.log(Level.INFO, "Error checking if file already has been processed, skipping: {0}", 
escapedArchiveFilePath); //NON-NLS unpackSuccessful = false; return unpackSuccessful; } catch (NoCurrentCaseException ex) { logger.log(Level.INFO, "No open case was found while trying to unpack the archive file {0}", escapedArchiveFilePath); //NON-NLS unpackSuccessful = false; return unpackSuccessful; } parentAr = depthMap.get(archiveId); if (parentAr == null) { parentAr = new Archive(null, archiveId, 0); depthMap.put(archiveId, parentAr); } else if (parentAr.getDepth() == MAX_DEPTH) { String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.warnMsg.zipBomb", archiveFile.getName()); String details = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.warnDetails.zipBomb", parentAr.getDepth(), escapedArchiveFilePath); //MessageNotifyUtil.Notify.error(msg, details); services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); unpackSuccessful = false; return unpackSuccessful; } try { stream = new SevenZipContentReadStream(new ReadContentInputStream(archiveFile)); // for RAR files we need to open them explicitly as RAR. Otherwise, if there is a ZIP archive inside RAR archive // it will be opened incorrectly when using 7zip's built-in auto-detect functionality. // All other archive formats are still opened using 7zip built-in auto-detect functionality. ArchiveFormat options = get7ZipOptions(archiveFile); if (password == null) { inArchive = SevenZip.openInArchive(options, stream); } else { inArchive = SevenZip.openInArchive(options, stream, password); } int numItems = inArchive.getNumberOfItems(); logger.log(Level.INFO, "Count of items in archive: {0}: {1}", new Object[]{escapedArchiveFilePath, numItems}); //NON-NLS progress.start(numItems); progressStarted = true; final ISimpleInArchive simpleInArchive = inArchive.getSimpleInterface(); //setup the archive local root folder final String uniqueArchiveFileName = FileUtil.escapeFileName(EmbeddedFileExtractorIngestModule.getUniqueName(archiveFile)); try { makeLocalDirectories(uniqueArchiveFileName); } catch (SecurityException e) { logger.log(Level.SEVERE, "Error setting up output path for archive root: {0}", getLocalRootAbsPath(uniqueArchiveFileName)); //NON-NLS //bail unpackSuccessful = false; return unpackSuccessful; } //initialize tree hierarchy to keep track of unpacked file structure SevenZipExtractor.UnpackedTree unpackedTree = new SevenZipExtractor.UnpackedTree(moduleDirRelative + "/" + uniqueArchiveFileName, archiveFile); long freeDiskSpace; try { freeDiskSpace = services.getFreeDiskSpace(); } catch (NullPointerException ex) { //If ingest has not been run at least once getFreeDiskSpace() will throw a null pointer exception //currently getFreeDiskSpace always returns DISK_FREE_SPACE_UNKNOWN freeDiskSpace = IngestMonitor.DISK_FREE_SPACE_UNKNOWN; } //unpack and process every item in archive int itemNumber = 0; for (ISimpleInArchiveItem item : simpleInArchive.getArchiveItems()) { String pathInArchive = getPathInArchive(item, itemNumber, archiveFile); //query for path in db ++itemNumber; //check if possible zip bomb if (isZipBombArchiveItemCheck(archiveFile, item)) { unpackSuccessful = false; return unpackSuccessful; } SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode = unpackedTree.addNode(pathInArchive); //update progress bar progress.progress(archiveFile.getName() + ": " + item.getPath(), processedItems); final boolean isEncrypted = item.isEncrypted(); if 
(isEncrypted && password == null) { logger.log(Level.WARNING, "Skipping encrypted file in archive: {0}", pathInArchive); //NON-NLS hasEncrypted = true; unpackSuccessful = false; continue; } else { fullEncryption = false; } // NOTE: item.getSize() may return null in case of certain // archiving formats. Eg: BZ2 //check if unpacking this file will result in out of disk space //this is additional to zip bomb prevention mechanism if (freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN && item.getSize() != null && item.getSize() > 0) { //if free space is known and file is not empty. long newDiskSpace = freeDiskSpace - item.getSize(); if (newDiskSpace < MIN_FREE_DISK_SPACE) { String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.msg", escapedArchiveFilePath, item.getPath()); String details = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.details"); //MessageNotifyUtil.Notify.error(msg, details); services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); logger.log(Level.INFO, "Skipping archive item due to insufficient disk space: {0}, {1}", new String[]{escapedArchiveFilePath, item.getPath()}); //NON-NLS logger.log(Level.INFO, "Available disk space: {0}", new Object[]{freeDiskSpace}); //NON-NLS unpackSuccessful = false; continue; //skip this file } else { //update est. disk space during this archive, so we don't need to poll for every file extracted freeDiskSpace = newDiskSpace; } } final String uniqueExtractedName = FileUtil.escapeFileName(uniqueArchiveFileName + File.separator + (item.getItemIndex() / 1000) + File.separator + item.getItemIndex() + "_" + new File(pathInArchive).getName()); //create local dirs and empty files before extracted File localFile = new java.io.File(moduleDirAbsolute + File.separator + uniqueExtractedName); //cannot rely on files in top-bottom order if (!localFile.exists()) { try { if (item.isFolder()) { localFile.mkdirs(); } else { localFile.getParentFile().mkdirs(); try { localFile.createNewFile(); } catch (IOException e) { logger.log(Level.SEVERE, "Error creating extracted file: " + localFile.getAbsolutePath(), e); //NON-NLS } } } catch (SecurityException e) { logger.log(Level.SEVERE, "Error setting up output path for unpacked file: {0}", pathInArchive); //NON-NLS //TODO consider bail out / msg to the user } } // skip the rest of this loop if we couldn't create the file if (localFile.exists() == false) { continue; } //find this node in the hierarchy, create if neede; unpackedNode = unpackNode(item, unpackedNode, password, freeDiskSpace, uniqueExtractedName); if (unpackedNode == null) { unpackSuccessful = false; } //update units for progress bar ++processedItems; } // add them to the DB. 
We wait until the end so that we have the metadata on all of the // intermediate nodes since the order is not guaranteed try { unpackedTree.updateOrAddFileToCaseRec(statusMap, archiveFilePath); unpackedFiles = unpackedTree.getAllFileObjects(); //check if children are archives, update archive depth tracking for (AbstractFile unpackedFile : unpackedFiles) { if (unpackedFile == null) { continue; } if (isSevenZipExtractionSupported(unpackedFile)) { Archive child = new Archive(parentAr, unpackedFile.getId(), parentAr.getDepth() + 1); parentAr.addChild(child); depthMap.put(unpackedFile.getId(), child); } } } catch (TskCoreException | NoCurrentCaseException e) { logger.log(Level.SEVERE, "Error populating complete derived file hierarchy from the unpacked dir structure", e); //NON-NLS //TODO decide if anything to cleanup, for now bailing } } catch (SevenZipException ex) { logger.log(Level.WARNING, "Error unpacking file: " + archiveFile, ex); //NON-NLS //inbox message // print a message if the file is allocated if (archiveFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC)) { String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.msg", archiveFile.getName()); String details = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.details", escapedArchiveFilePath, ex.getMessage()); services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); } } finally { if (inArchive != null) { try { inArchive.close(); } catch (SevenZipException e) { logger.log(Level.SEVERE, "Error closing archive: " + archiveFile, e); //NON-NLS } } if (stream != null) { try { stream.close(); } catch (IOException ex) { logger.log(Level.SEVERE, "Error closing stream after unpacking archive: " + archiveFile, ex); //NON-NLS } } //close progress bar if (progressStarted) { progress.finish(); } } //create artifact and send user message if (hasEncrypted) { String encryptionType = fullEncryption ? 
ENCRYPTION_FULL : ENCRYPTION_FILE_LEVEL; try { BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED); artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, EmbeddedFileExtractorModuleFactory.getModuleName(), encryptionType)); try { // index the artifact for keyword search blackboard.indexArtifact(artifact); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS MessageNotifyUtil.Notify.error( Bundle.SevenZipExtractor_indexError_message(), artifact.getDisplayName()); } services.fireModuleDataEvent(new ModuleDataEvent(EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + escapedArchiveFilePath, ex); //NON-NLS } String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.msg"); String details = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.details", archiveFile.getName(), EmbeddedFileExtractorModuleFactory.getModuleName()); services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details)); } // adding unpacked extracted derived files to the job after closing relevant resources. if (!unpackedFiles.isEmpty()) { //currently sending a single event for all new files services.fireModuleContentEvent(new ModuleContentEvent(archiveFile)); if (context != null) { context.addFilesToJob(unpackedFiles); } } return unpackSuccessful; } /** * Stream used to unpack the archive to local file */ private abstract static class UnpackStream implements ISequentialOutStream { private OutputStream output; private String localAbsPath; UnpackStream(String localAbsPath) { this.localAbsPath = localAbsPath; try { output = new EncodedFileOutputStream(new FileOutputStream(localAbsPath), TskData.EncodingType.XOR1); } catch (IOException ex) { logger.log(Level.SEVERE, "Error writing extracted file: " + localAbsPath, ex); //NON-NLS } } public abstract long getSize(); OutputStream getOutput() { return output; } String getLocalAbsPath() { return localAbsPath; } public void close() { if (output != null) { try { output.flush(); output.close(); } catch (IOException e) { logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", localAbsPath); //NON-NLS } } } } /** * Stream used to unpack the archive of unknown size to local file */ private static class UnknownSizeUnpackStream extends UnpackStream { private long freeDiskSpace; private boolean outOfSpace = false; private long bytesWritten = 0; UnknownSizeUnpackStream(String localAbsPath, long freeDiskSpace) { super(localAbsPath); this.freeDiskSpace = freeDiskSpace; } @Override public long getSize() { return this.bytesWritten; } @Override public int write(byte[] bytes) throws SevenZipException { try { // If the content size is unknown, cautiously write to disk. // Write only if byte array is less than 80% of the current // free disk space. if (freeDiskSpace == IngestMonitor.DISK_FREE_SPACE_UNKNOWN || bytes.length < 0.8 * freeDiskSpace) { getOutput().write(bytes); // NOTE: this method is called multiple times for a // single extractSlow() call. 
Update bytesWritten and // freeDiskSpace after every write operation. this.bytesWritten += bytes.length; this.freeDiskSpace -= bytes.length; } else { this.outOfSpace = true; logger.log(Level.INFO, NbBundle.getMessage( SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg")); throw new SevenZipException( NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg")); } } catch (IOException ex) { throw new SevenZipException( NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg", getLocalAbsPath()), ex); } return bytes.length; } @Override public void close() { if (getOutput() != null) { try { getOutput().flush(); getOutput().close(); if (this.outOfSpace) { Files.delete(Paths.get(getLocalAbsPath())); } } catch (IOException e) { logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", getLocalAbsPath()); //NON-NLS } } } } /** * Stream used to unpack the archive of known size to local file */ private static class KnownSizeUnpackStream extends UnpackStream { private long size; KnownSizeUnpackStream(String localAbsPath, long size) { super(localAbsPath); this.size = size; } @Override public long getSize() { return this.size; } @Override public int write(byte[] bytes) throws SevenZipException { try { getOutput().write(bytes); } catch (IOException ex) { throw new SevenZipException( NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg", getLocalAbsPath()), ex); } return bytes.length; } } /** * Representation of the files in the archive. Used to track of local tree * file hierarchy, archive depth, and files created to easily and reliably * get parent AbstractFile for unpacked file. So that we don't have to * depend on type of traversal of unpacked files handed to us by 7zip * unpacker. */ private class UnpackedTree { final UnpackedNode rootNode; /** * * @param localPathRoot Path in module output folder that files will be * saved to * @param archiveFile Archive file being extracted * @param fileManager */ UnpackedTree(String localPathRoot, AbstractFile archiveFile) { this.rootNode = new UnpackedNode(); this.rootNode.setFile(archiveFile); this.rootNode.setFileName(archiveFile.getName()); this.rootNode.setLocalRelPath(localPathRoot); } /** * Creates a node in the tree at the given path. Makes intermediate * nodes if needed. If a node already exists at that path, it is * returned. 
* * @param filePath file path with 1 or more tokens separated by / * * @return child node for the last file token in the filePath */ UnpackedNode addNode(String filePath) { String[] toks = filePath.split("[\\/\\\\]"); List<String> tokens = new ArrayList<>(); for (int i = 0; i < toks.length; ++i) { if (!toks[i].isEmpty()) { tokens.add(toks[i]); } } return addNode(rootNode, tokens); } /** * recursive method that traverses the path * * @param parent * @param tokenPath * * @return */ private UnpackedNode addNode(UnpackedNode parent, List<String> tokenPath) { // we found all of the tokens if (tokenPath.isEmpty()) { return parent; } // get the next name in the path and look it up String childName = tokenPath.remove(0); UnpackedNode child = parent.getChild(childName); // create new node if (child == null) { child = new UnpackedNode(childName, parent); parent.addChild(child); } // go down one more level return addNode(child, tokenPath); } /** * Get the root file objects (after createDerivedFiles() ) of this tree, * so that they can be rescheduled. * * @return root objects of this unpacked tree */ List<AbstractFile> getRootFileObjects() { List<AbstractFile> ret = new ArrayList<>(); for (UnpackedNode child : rootNode.getChildren()) { ret.add(child.getFile()); } return ret; } /** * Get the all file objects (after createDerivedFiles() ) of this tree, * so that they can be rescheduled. * * @return all file objects of this unpacked tree */ List<AbstractFile> getAllFileObjects() { List<AbstractFile> ret = new ArrayList<>(); for (UnpackedNode child : rootNode.getChildren()) { getAllFileObjectsRec(ret, child); } return ret; } private void getAllFileObjectsRec(List<AbstractFile> list, UnpackedNode parent) { list.add(parent.getFile()); for (UnpackedNode child : parent.getChildren()) { getAllFileObjectsRec(list, child); } } /** * Traverse the tree top-down after unzipping is done and create derived * files for the entire hierarchy */ void updateOrAddFileToCaseRec(HashMap<String, ZipFileStatusWrapper> statusMap, String archiveFilePath) throws TskCoreException, NoCurrentCaseException { final FileManager fileManager = Case.getCurrentCaseThrows().getServices().getFileManager(); for (UnpackedNode child : rootNode.getChildren()) { updateOrAddFileToCaseRec(child, fileManager, statusMap, archiveFilePath); } } /** * Add derived files to the case if they do not exist, update the * derived file data if the new file contains more information than the * existing one, and do nothing if the existing information is complete. * * @param node - the UnpackedNode for the file which is being * added or updated * @param fileManager - the file manager to perform the adding or * updating * @param statusMap - the map of existing files and their status * @param archiveFilePath - the archive file path for the unpacked node * * @throws TskCoreException */ private void updateOrAddFileToCaseRec(UnpackedNode node, FileManager fileManager, HashMap<String, ZipFileStatusWrapper> statusMap, String archiveFilePath) throws TskCoreException { DerivedFile df; try { String nameInDatabase = getKeyFromUnpackedNode(node, archiveFilePath); ZipFileStatusWrapper existingFile = nameInDatabase == null ? 
null : statusMap.get(nameInDatabase); if (existingFile == null) { df = fileManager.addDerivedFile(node.getFileName(), node.getLocalRelPath(), node.getSize(), node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(), node.isIsFile(), node.getParent().getFile(), "", EmbeddedFileExtractorModuleFactory.getModuleName(), "", "", TskData.EncodingType.XOR1); statusMap.put(getKeyAbstractFile(df), new ZipFileStatusWrapper(df, ZipFileStatus.EXISTS)); } else { String key = getKeyAbstractFile(existingFile.getFile()); if (existingFile.getStatus() == ZipFileStatus.EXISTS && existingFile.getFile().getSize() < node.getSize()) { existingFile.setStatus(ZipFileStatus.UPDATE); statusMap.put(key, existingFile); } if (existingFile.getStatus() == ZipFileStatus.UPDATE) { //if the we are updating a file and its mime type was octet-stream we want to re-type it String mimeType = existingFile.getFile().getMIMEType().equalsIgnoreCase("application/octet-stream") ? null : existingFile.getFile().getMIMEType(); df = fileManager.updateDerivedFile((DerivedFile) existingFile.getFile(), node.getLocalRelPath(), node.getSize(), node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(), node.isIsFile(), mimeType, "", EmbeddedFileExtractorModuleFactory.getModuleName(), "", "", TskData.EncodingType.XOR1); } else { //ALREADY CURRENT - SKIP statusMap.put(key, new ZipFileStatusWrapper(existingFile.getFile(), ZipFileStatus.SKIP)); df = (DerivedFile) existingFile.getFile(); } } node.setFile(df); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error adding a derived file to db:" + node.getFileName(), ex); //NON-NLS throw new TskCoreException( NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackedTree.exception.msg", node.getFileName()), ex); } //recurse adding the children if this file was incomplete the children presumably need to be added for (UnpackedNode child : node.getChildren()) { updateOrAddFileToCaseRec(child, fileManager, statusMap, getKeyFromUnpackedNode(node, archiveFilePath)); } } /** * A node in the unpacked tree that represents a file or folder. */ private class UnpackedNode { private String fileName; private AbstractFile file; private List<UnpackedNode> children = new ArrayList<>(); private String localRelPath = ""; private long size; private long ctime, crtime, atime, mtime; private boolean isFile; private UnpackedNode parent; //root constructor UnpackedNode() { } //child node constructor UnpackedNode(String fileName, UnpackedNode parent) { this.fileName = fileName; this.parent = parent; this.localRelPath = parent.getLocalRelPath() + File.separator + fileName; } long getCtime() { return ctime; } long getCrtime() { return crtime; } long getAtime() { return atime; } long getMtime() { return mtime; } void setFileName(String fileName) { this.fileName = fileName; } /** * Add a child to the list of child nodes associated with this node. * * @param child - the node which is a child node of this node */ void addChild(UnpackedNode child) { children.add(child); } /** * Get this nodes list of child UnpackedNode * * @return children - the UnpackedNodes which are children of this * node. */ List<UnpackedNode> getChildren() { return children; } /** * Gets the parent node of this node. 
* * @return - the parent UnpackedNode */ UnpackedNode getParent() { return parent; } void addDerivedInfo(long size, boolean isFile, long ctime, long crtime, long atime, long mtime, String relLocalPath) { this.size = size; this.isFile = isFile; this.ctime = ctime; this.crtime = crtime; this.atime = atime; this.mtime = mtime; this.localRelPath = relLocalPath; } void setFile(AbstractFile file) { this.file = file; } /** * get child by name or null if it doesn't exist * * @param childFileName * * @return */ UnpackedNode getChild(String childFileName) { UnpackedNode ret = null; for (UnpackedNode child : children) { if (child.getFileName().equals(childFileName)) { ret = child; break; } } return ret; } String getFileName() { return fileName; } AbstractFile getFile() { return file; } String getLocalRelPath() { return localRelPath; } /** * Set the local relative path associated with this UnpackedNode * * @param localRelativePath - the local relative path to be * associated with this node. */ void setLocalRelPath(String localRelativePath) { localRelPath = localRelativePath; } long getSize() { return size; } boolean isIsFile() { return isFile; } } } static class Archive { private final int depth; private final long objectId; private final Archive parent; private final List<Archive> children; Archive(Archive parent, long objectId, int depth) { this.parent = parent; this.objectId = objectId; this.children = new ArrayList<>(); this.depth = depth; } /** * Add a child to the list of child archives associated with this * archive. * * @param child - the archive which is a child archive of this archive */ void addChild(Archive child) { children.add(child); } /** * Get the object id of the parent of this archive. * * @return parent.objectId - the unique objectId of this archives parent * archive */ long getParentObjectId() { return parent.getObjectId(); } /** * Get the object id of this archive. * * @return objectId - the unique objectId of this archive */ long getObjectId() { return objectId; } /** * Get archive depth of this archive * * @return depth - an integer representing how many layers of archives * this archive is inside. 
*/ int getDepth() { return depth; } } /** * A class which wraps an AbstractFile and an enum identifying whether the * file which exists in the case database is current */ private final class ZipFileStatusWrapper { private final AbstractFile abstractFile; private ZipFileStatus zipStatus; /** * Construct a ZipFileStatusWrapper to wrap the given AbstractFile and * status * * @param file - The AbstractFile which exists in the case database * @param status - an indicator of whether the file information is current */ private ZipFileStatusWrapper(AbstractFile file, ZipFileStatus status) { abstractFile = file; zipStatus = status; } /** * Get the AbstractFile contained in this object * * @return abstractFile - The abstractFile this object wraps */ private AbstractFile getFile() { return abstractFile; } /** * Get whether the file should be skipped or updated * * @return zipStatus - an Enum value indicating if the file is current */ private ZipFileStatus getStatus() { return zipStatus; } /** * Set the zipStatus of the file being wrapped when it changes * * @param status - an Enum value indicating if the file is current */ private void setStatus(ZipFileStatus status) { zipStatus = status; } } /** * The status of the file from the archive in regards to whether it should * be updated */ private enum ZipFileStatus { UPDATE, //Should be updated //NON-NLS SKIP, //File is current can be skipped //NON-NLS EXISTS //File exists but it is unknown if it is current //NON-NLS } }
3819 additional clean up and removal of unnecessary fields depth fix
Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
3819 additional clean up and removal of unnecessary fields depth fix
<ide><path>ore/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java <ide> } <ide> parentAr = depthMap.get(archiveId); <ide> if (parentAr == null) { <del> parentAr = new Archive(null, archiveId, 0); <add> parentAr = new Archive(archiveId, 0); <ide> depthMap.put(archiveId, parentAr); <ide> } else if (parentAr.getDepth() == MAX_DEPTH) { <ide> String msg = NbBundle.getMessage(SevenZipExtractor.class, <ide> continue; <ide> } <ide> if (isSevenZipExtractionSupported(unpackedFile)) { <del> Archive child = new Archive(parentAr, unpackedFile.getId(), parentAr.getDepth() + 1); <add> Archive child = new Archive(unpackedFile.getId(), parentAr.getDepth() + 1); <ide> parentAr.addChild(child); <ide> depthMap.put(unpackedFile.getId(), child); <ide> } <ide> <ide> private String fileName; <ide> private AbstractFile file; <del> private List<UnpackedNode> children = new ArrayList<>(); <add> private final List<UnpackedNode> children = new ArrayList<>(); <ide> private String localRelPath = ""; <ide> private long size; <ide> private long ctime, crtime, atime, mtime; <ide> <ide> private final int depth; <ide> private final long objectId; <del> private final Archive parent; <ide> private final List<Archive> children; <ide> <del> Archive(Archive parent, long objectId, int depth) { <del> this.parent = parent; <add> Archive(long objectId, int depth) { <ide> this.objectId = objectId; <ide> this.children = new ArrayList<>(); <ide> this.depth = depth; <ide> */ <ide> void addChild(Archive child) { <ide> children.add(child); <del> } <del> <del> /** <del> * Get the object id of the parent of this archive. <del> * <del> * @return parent.objectId - the unique objectId of this archives parent <del> * archive <del> */ <del> long getParentObjectId() { <del> return parent.getObjectId(); <ide> } <ide> <ide> /**
JavaScript
mit
2bfd8c105cb16ce47dc76e8ffb0f0c60e2700dd5
0
utarsuno/quasar_source,utarsuno/quasar_source,utarsuno/quasar_source,utarsuno/quasar_source,utarsuno/quasar_source,utarsuno/quasar_source,utarsuno/quasar_source
'use strict' function Floating3DText(w, h, text, type, scene) { this.__init__(w, h, text, type, scene) } //const GLOBAL_FONT = new THREE.Font(JSON.parse(document.getElementById('font_3d').innerHTML)) // TODO : Abstract away the logic from the rendering in different classes. Floating3DText.prototype = { // Only used for Password type texts. _hidden_text: null, text: null, width: null, size: null, height: null, material: null, text_geometry: null, object3d: null, current_text_object: null, scene: null, wireframe: null, // States. being_looked_at: null, being_engaged_with: null, current_color: null, // Properties. type: null, // original_border_color: null, // also_color_this_floating_text: null, __init__: function(w, h, text, type, scene) { this.scene = scene this.width = w this.height = h this.text = text this._hidden_text = '' this.being_looked_at = false this.being_engaged_with = false this.original_border_color = 0xFFC0CB this.type = type this.create_outline() this.create() }, get_text: function() { if (this.type === TYPE_INPUT_PASSWORD) { return this._hidden_text } return this.text }, look_at: function() { if (this.being_looked_at === false) { this.wireframe.material.color.setHex(COLOR_HIGHLIGHT) this.update_text_color(this.text, COLOR_TEXT_HIGHLIGHT) if (this.also_color_this_floating_text !== null) { this.also_color_this_floating_text.update_just_color(COLOR_TEXT_HIGHLIGHT) } } this.being_looked_at = true }, look_away: function() { if (this.being_looked_at) { this.wireframe.material.color.setHex(this.original_border_color) this.update_text_color(this.text, COLOR_TEXT_DEFAULT) if (this.also_color_this_floating_text !== null) { this.also_color_this_floating_text.update_just_color(COLOR_TEXT_DEFAULT) } } this.being_looked_at = false }, disengage: function(player) { if (this.type != TYPE_BUTTON) { this.being_engaged_with = false player.disengage() } }, is_engaged: function() { return this.being_engaged_with }, engage: function(player) { if (this.type != TYPE_BUTTON) { this.being_engaged_with = true player.engage() } }, update_just_color: function(color_arg) { this.material.color.setHex(color_arg) this.material.needsUpdate = true }, update_text_color: function(text, color_arg) { if (this.current_color !== color_arg) { this.material.color.setHex(color_arg) this.material.needsUpdate = true } if (this.current_text_object !== null) { this.object3d.remove(this.current_text_object) this.current_text_object.geometry.dispose() this.current_text_object.material.dispose() } console.log(this.size) console.log(this.height) console.log(GLOBAL_FONT) this.text_geometry = new THREE.TextGeometry(this.text, { size: this.size, height: this.height, curveSegments: 2, font: GLOBAL_FONT }) this.current_text = text this.current_text_object = new THREE.Mesh(this.text_geometry, this.material) this.object3d.add(this.current_text_object) }, update_text: function(text) { if (this.current_text !== text) { this.update_text_color(text, COLOR_TEXT_DEFAULT) } }, create_outline: function() { this.material = new THREE.MeshLambertMaterial({color: COLOR_TEXT_DEFAULT}) this.current_color = COLOR_TEXT_DEFAULT this.material.needsUpdate = true this.object3d = new THREE.Object3D() // PlaneGeometry takes in a width, height, optionalWidthSegments (default 1), optionalHeightSegments (default 1) this.border_geometry = new THREE.PlaneGeometry(this.width, this.height) // Adds the edge colorings. 
this.border_mesh = new THREE.Mesh(this.border_geometry, this.material) var border_geo = new THREE.EdgesGeometry(this.border_mesh.geometry) // or WireframeGeometry var border_mat = new THREE.LineBasicMaterial({color: this.original_border_color, linewidth: 3}) this.border_wireframe = new THREE.LineSegments(border_geo, border_mat) this.border_mesh.add(this.border_wireframe) this.object3d.add(this.border_mesh) this.scene.add(this.object3d) }, create: function() { if (this.type == TYPE_TITLE) { this.size = 40 this.height = 2 } else { this.size = 20 this.height = 1 } this.update_text(this.text) // TODO : Make this only 1 sided this.material.side = THREE.DoubleSide }, update_position_and_look_at: function(position_vector, look_at_position) { this.object3d.position.x = position_vector.x this.object3d.position.y = position_vector.y this.object3d.position.z = position_vector.z this.object3d.lookAt(look_at_position) }, add_character: function(character) { this.text = this.text + character this.update_text(this.text) }, pop_character: function() { this.text = this.text.slice(0, -1) this.update_text(this.text) }, parse_keycode: function(event) { var keycode = event.keyCode if (keycode == 8) { if (this.text.length > 0) { this.pop_character() if (this.type == TYPE_INPUT_PASSWORD) { this._hidden_text = this._hidden_text.slice(0, -1) } } GLOBAL_AUDIO.play_typing_sound() } else if (event.key.length == 1) { if (this.type == TYPE_INPUT_PASSWORD) { this._hidden_text += event.key this.add_character('*') } else if (this.type == TYPE_INPUT_REGULAR) { this.add_character(event.key) } GLOBAL_AUDIO.play_typing_sound() } } }
quasar_source_code/quasar_site_django/static/js/custom/models/floating_3d_text.js
'use strict' function Floating3DText(w, h, text, type, scene) { this.__init__(w, h, text, type, scene) } //const GLOBAL_FONT = new THREE.Font(JSON.parse(document.getElementById('font_3d').innerHTML)) // TODO : Abstract away the logic from the rendering in different classes. Floating3DText.prototype = { // Only used for Password type texts. _hidden_text: null, text: null, width: null, size: null, height: null, material: null, text_geometry: null, object3d: null, current_text_object: null, scene: null, wireframe: null, // States. being_looked_at: null, being_engaged_with: null, current_color: null, // Properties. type: null, // original_border_color: null, // also_color_this_floating_text: null, __init__: function(w, h, text, type, scene) { this.scene = scene this.width = w this.height = h this.text = text this._hidden_text = '' this.being_looked_at = false this.being_engaged_with = false this.original_border_color = 0xFFC0CB this.type = type this.create_outline() this.create() }, get_text: function() { if (this.type === TYPE_INPUT_PASSWORD) { return this._hidden_text } return this.text }, look_at: function() { if (this.being_looked_at === false) { this.wireframe.material.color.setHex(COLOR_HIGHLIGHT) this.update_text_color(this.text, COLOR_TEXT_HIGHLIGHT) if (this.also_color_this_floating_text !== null) { this.also_color_this_floating_text.update_just_color(COLOR_TEXT_HIGHLIGHT) } } this.being_looked_at = true }, look_away: function() { if (this.being_looked_at) { this.wireframe.material.color.setHex(this.original_border_color) this.update_text_color(this.text, COLOR_TEXT_DEFAULT) if (this.also_color_this_floating_text !== null) { this.also_color_this_floating_text.update_just_color(COLOR_TEXT_DEFAULT) } } this.being_looked_at = false }, disengage: function(player) { if (this.type != TYPE_BUTTON) { this.being_engaged_with = false player.disengage() } }, is_engaged: function() { return this.being_engaged_with }, engage: function(player) { if (this.type != TYPE_BUTTON) { this.being_engaged_with = true player.engage() } }, update_just_color: function(color_arg) { this.material.color.setHex(color_arg) this.material.needsUpdate = true }, update_text_color: function(text, color_arg) { if (this.current_color !== color_arg) { this.material.color.setHex(color_arg) this.material.needsUpdate = true } if (this.current_text_object !== null) { this.object3d.remove(this.current_text_object) this.current_text_object.geometry.dispose() this.current_text_object.material.dispose() } console.log(this.size) console.log(this.height) console.log(GLOBAL_FONT) this.text_geometry = new THREE.TextGeometry(this.text, { size: this.size, height: this.height, curveSegments: 2, font: GLOBAL_FONT }) this.current_text = text this.current_text_object = new THREE.Mesh(this.text_geometry, this.material) this.object3d.add(this.current_text_object) }, update_text: function(text) { if (this.current_text !== text) { this.update_text_color(text, COLOR_TEXT_DEFAULT) } }, create_outline: function() { this.material = new THREE.MeshLambertMaterial({color: color_arg}) this.current_color = color_arg this.material.needsUpdate = true this.object3d = new THREE.Object3D() // PlaneGeometry takes in a width, height, optionalWidthSegments (default 1), optionalHeightSegments (default 1) this.border_geometry = new THREE.PlaneGeometry(this.width, this.height) // Adds the edge colorings. 
this.border_mesh = new THREE.Mesh(this.border_geometry, this.material) var border_geo = new THREE.EdgesGeometry(this.border_mesh.geometry) // or WireframeGeometry var border_mat = new THREE.LineBasicMaterial({color: this.original_border_color, linewidth: 3}) this.border_wireframe = new THREE.LineSegments(border_geo, border_mat) this.border_mesh.add(this.border_wireframe) this.object3d.add(this.border_mesh) this.scene.add(this.object3d) }, create: function() { if (this.type == TYPE_TITLE) { this.size = 40 this.height = 2 } else { this.size = 20 this.height = 1 } this.update_text(this.text) // TODO : Make this only 1 sided this.material.side = THREE.DoubleSide }, update_position_and_look_at: function(position_vector, look_at_position) { this.object3d.position.x = position_vector.x this.object3d.position.y = position_vector.y this.object3d.position.z = position_vector.z this.object3d.lookAt(look_at_position) }, add_character: function(character) { this.text = this.text + character this.update_text(this.text) }, pop_character: function() { this.text = this.text.slice(0, -1) this.update_text(this.text) }, parse_keycode: function(event) { var keycode = event.keyCode if (keycode == 8) { if (this.text.length > 0) { this.pop_character() if (this.type == TYPE_INPUT_PASSWORD) { this._hidden_text = this._hidden_text.slice(0, -1) } } GLOBAL_AUDIO.play_typing_sound() } else if (event.key.length == 1) { if (this.type == TYPE_INPUT_PASSWORD) { this._hidden_text += event.key this.add_character('*') } else if (this.type == TYPE_INPUT_REGULAR) { this.add_character(event.key) } GLOBAL_AUDIO.play_typing_sound() } } }
general updating 03
quasar_source_code/quasar_site_django/static/js/custom/models/floating_3d_text.js
general updating 03
<ide><path>uasar_source_code/quasar_site_django/static/js/custom/models/floating_3d_text.js <ide> }, <ide> <ide> create_outline: function() { <del> this.material = new THREE.MeshLambertMaterial({color: color_arg}) <del> this.current_color = color_arg <add> this.material = new THREE.MeshLambertMaterial({color: COLOR_TEXT_DEFAULT}) <add> this.current_color = COLOR_TEXT_DEFAULT <ide> this.material.needsUpdate = true <ide> <ide>
Java
apache-2.0
d6b1f9673c287b8b12c2ac901cd8d8ab12a408c4
0
johngmyers/platform,johngmyers/platform-rack,johngmyers/platform-rack,cberner/airlift,proofpoint/platform,erichwang/airlift,mono-plane/airlift,dain/airlift,haozhun/airlift,gwittel/platform,zhenyuy-fb/airlift,mono-plane/airlift,erichwang/airlift,martint/airlift,martint/airlift,johngmyers/platform,airlift/airlift-rack,proofpoint/platform,johngmyers/platform-rack,daququ/airlift,electrum/airlift,johngmyers/airlift,johngmyers/platform-rack,haozhun/airlift,erichwang/airlift,cberner/airlift,johngmyers/airlift,cberner/airlift,daququ/airlift,haozhun/airlift,airlift/airlift,airlift/airlift,dain/airlift,zhenyuy-fb/airlift,johngmyers/platform-rack,daququ/airlift,mono-plane/airlift,gwittel/platform,electrum/airlift,airlift/airlift,zhenyuy-fb/airlift,daququ/airlift,proofpoint/platform,haozhun/airlift,cberner/airlift,zhenyuy-fb/airlift,johngmyers/platform,electrum/airlift,johngmyers/platform-rack,johngmyers/airlift,johngmyers/airlift,gwittel/platform,martint/airlift,mono-plane/airlift,dain/airlift,airlift/airlift-rack,electrum/airlift,dain/airlift,airlift/airlift
package com.proofpoint.event.client; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import org.codehaus.jackson.JsonGenerator; import java.io.IOException; import java.lang.reflect.Method; import java.util.ArrayDeque; import java.util.Comparator; import java.util.Deque; import java.util.List; import static com.google.common.base.Objects.firstNonNull; import static com.proofpoint.event.client.EventDataType.validateFieldValueType; class EventFieldMetadata { public static final Comparator<EventFieldMetadata> NAME_COMPARATOR = new Comparator<EventFieldMetadata>() { public int compare(EventFieldMetadata a, EventFieldMetadata b) { return a.name.compareTo(b.name); } }; private final String name; private final String v1Name; private final Method method; private final EventDataType eventDataType; private final EventTypeMetadata<?> nestedType; private final boolean iterable; EventFieldMetadata(String name, String v1Name, Method method, EventDataType eventDataType, EventTypeMetadata<?> nestedType, boolean iterable) { Preconditions.checkArgument((eventDataType != null) || (nestedType != null), "both eventDataType and nestedType are null"); Preconditions.checkArgument((eventDataType == null) || (nestedType == null), "both eventDataType and nestedType are set"); this.name = name; this.v1Name = v1Name; this.method = method; this.eventDataType = eventDataType; this.nestedType = nestedType; this.iterable = iterable; } @SuppressWarnings("ThrowableResultOfMethodCallIgnored") // IDEA-74322 private Object getValue(Object event) throws InvalidEventException { try { return method.invoke(event); } catch (Exception e) { throw new InvalidEventException(firstNonNull(e.getCause(), e), "Unable to get value of event field %s: Exception occurred while invoking [%s]", name, method.toGenericString()); } } public void writeField(JsonGenerator jsonGenerator, Object event) throws IOException { writeField(jsonGenerator, event, new ArrayDeque<Object>()); } private void writeField(JsonGenerator jsonGenerator, Object event, Deque<Object> objectStack) throws IOException { Object value = getValue(event); if (value != null) { jsonGenerator.writeFieldName(name); if (iterable) { validateFieldValueType(value, Iterable.class); writeArray(jsonGenerator, (Iterable<?>) value, objectStack); } else { writeFieldValue(jsonGenerator, value, objectStack); } } } private void writeFieldValue(JsonGenerator jsonGenerator, Object value, Deque<Object> objectStack) throws IOException { if (eventDataType != null) { eventDataType.writeFieldValue(jsonGenerator, value); } else { validateFieldValueType(value, nestedType.getEventClass()); writeObject(jsonGenerator, value, objectStack); } } private void writeArray(JsonGenerator jsonGenerator, Iterable<?> value, Deque<Object> objectStack) throws IOException { jsonGenerator.writeStartArray(); for (Object item : value) { writeFieldValue(jsonGenerator, item, objectStack); } jsonGenerator.writeEndArray(); } private void writeObject(JsonGenerator jsonGenerator, Object value, Deque<Object> objectStack) throws IOException { checkForCycles(value, objectStack); objectStack.push(value); jsonGenerator.writeStartObject(); for (EventFieldMetadata field : nestedType.getFields()) { field.writeField(jsonGenerator, value, objectStack); } jsonGenerator.writeEndObject(); objectStack.pop(); } private static void checkForCycles(Object value, Deque<Object> objectStack) throws InvalidEventException { for (Object o : objectStack) { if (value == o) { List<Object> path = 
Lists.reverse(Lists.newArrayList(objectStack)); throw new InvalidEventException("Cycle detected in event data: %s", path); } } } public void writeFieldV1(JsonGenerator jsonGenerator, Object event) throws IOException { Preconditions.checkState(!iterable, "iterable fields not supported for JSON V1"); Preconditions.checkState(nestedType == null, "nested types not supported for JSON V1"); Object value = getValue(event); if (value != null) { jsonGenerator.writeStringField("name", v1Name); jsonGenerator.writeFieldName("value"); eventDataType.writeFieldValue(jsonGenerator, value); } } }
event/src/main/java/com/proofpoint/event/client/EventFieldMetadata.java
package com.proofpoint.event.client; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import org.codehaus.jackson.JsonGenerator; import java.io.IOException; import java.lang.reflect.Method; import java.util.ArrayDeque; import java.util.Comparator; import java.util.Deque; import java.util.List; import static com.google.common.base.Objects.firstNonNull; import static com.proofpoint.event.client.EventDataType.validateFieldValueType; class EventFieldMetadata { public static final Comparator<EventFieldMetadata> NAME_COMPARATOR = new Comparator<EventFieldMetadata>() { public int compare(EventFieldMetadata a, EventFieldMetadata b) { return a.name.compareTo(b.name); } }; private final String name; private final String v1Name; private final Method method; private final EventDataType eventDataType; private final EventTypeMetadata<?> nestedType; private final boolean iterable; EventFieldMetadata(String name, String v1Name, Method method, EventDataType eventDataType, EventTypeMetadata<?> nestedType, boolean iterable) { Preconditions.checkArgument((eventDataType != null) || (nestedType != null), "both eventDataType and nestedType are null"); Preconditions.checkArgument((eventDataType == null) || (nestedType == null), "both eventDataType and nestedType are set"); this.name = name; this.v1Name = v1Name; this.method = method; this.eventDataType = eventDataType; this.nestedType = nestedType; this.iterable = iterable; } @SuppressWarnings("ThrowableResultOfMethodCallIgnored") // IDEA-74322 private Object getValue(Object event) throws InvalidEventException { try { return method.invoke(event); } catch (Exception e) { throw new InvalidEventException(firstNonNull(e.getCause(), e), "Unable to get value of event field %s: Exception occurred while invoking [%s]", name, method.toGenericString()); } } public void writeField(JsonGenerator jsonGenerator, Object event) throws IOException { writeField(jsonGenerator, event, new ArrayDeque<Object>()); } private void writeField(JsonGenerator jsonGenerator, Object event, Deque<Object> objectStack) throws IOException { Object value = getValue(event); if (value != null) { jsonGenerator.writeFieldName(name); if (iterable) { validateFieldValueType(value, Iterable.class); jsonGenerator.writeStartArray(); for (Object item : (Iterable<?>) value) { writeFieldValue(jsonGenerator, item, objectStack); } jsonGenerator.writeEndArray(); } else { writeFieldValue(jsonGenerator, value, objectStack); } } } private void writeFieldValue(JsonGenerator jsonGenerator, Object value, Deque<Object> objectStack) throws IOException { if (eventDataType != null) { eventDataType.writeFieldValue(jsonGenerator, value); } else { validateFieldValueType(value, nestedType.getEventClass()); for (Object o : objectStack) { if (value == o) { List<Object> path = Lists.reverse(Lists.newArrayList(objectStack)); throw new InvalidEventException("Cycle detected in event data: %s", path); } } objectStack.push(value); jsonGenerator.writeStartObject(); for (EventFieldMetadata field : nestedType.getFields()) { field.writeField(jsonGenerator, value, objectStack); } jsonGenerator.writeEndObject(); objectStack.pop(); } } public void writeFieldV1(JsonGenerator jsonGenerator, Object event) throws IOException { Preconditions.checkState(!iterable, "iterable fields not supported for JSON V1"); Preconditions.checkState(nestedType == null, "nested types not supported for JSON V1"); Object value = getValue(event); if (value != null) { jsonGenerator.writeStringField("name", v1Name); 
jsonGenerator.writeFieldName("value"); eventDataType.writeFieldValue(jsonGenerator, value); } } }
Extract methods for writing field data
event/src/main/java/com/proofpoint/event/client/EventFieldMetadata.java
Extract methods for writing field data
<ide><path>vent/src/main/java/com/proofpoint/event/client/EventFieldMetadata.java <ide> jsonGenerator.writeFieldName(name); <ide> if (iterable) { <ide> validateFieldValueType(value, Iterable.class); <del> jsonGenerator.writeStartArray(); <del> for (Object item : (Iterable<?>) value) { <del> writeFieldValue(jsonGenerator, item, objectStack); <del> } <del> jsonGenerator.writeEndArray(); <add> writeArray(jsonGenerator, (Iterable<?>) value, objectStack); <ide> } <ide> else { <ide> writeFieldValue(jsonGenerator, value, objectStack); <ide> } <ide> else { <ide> validateFieldValueType(value, nestedType.getEventClass()); <del> for (Object o : objectStack) { <del> if (value == o) { <del> List<Object> path = Lists.reverse(Lists.newArrayList(objectStack)); <del> throw new InvalidEventException("Cycle detected in event data: %s", path); <del> } <add> writeObject(jsonGenerator, value, objectStack); <add> } <add> } <add> <add> private void writeArray(JsonGenerator jsonGenerator, Iterable<?> value, Deque<Object> objectStack) <add> throws IOException <add> { <add> jsonGenerator.writeStartArray(); <add> for (Object item : value) { <add> writeFieldValue(jsonGenerator, item, objectStack); <add> } <add> jsonGenerator.writeEndArray(); <add> } <add> <add> private void writeObject(JsonGenerator jsonGenerator, Object value, Deque<Object> objectStack) <add> throws IOException <add> { <add> checkForCycles(value, objectStack); <add> objectStack.push(value); <add> jsonGenerator.writeStartObject(); <add> for (EventFieldMetadata field : nestedType.getFields()) { <add> field.writeField(jsonGenerator, value, objectStack); <add> } <add> jsonGenerator.writeEndObject(); <add> objectStack.pop(); <add> } <add> <add> private static void checkForCycles(Object value, Deque<Object> objectStack) <add> throws InvalidEventException <add> { <add> for (Object o : objectStack) { <add> if (value == o) { <add> List<Object> path = Lists.reverse(Lists.newArrayList(objectStack)); <add> throw new InvalidEventException("Cycle detected in event data: %s", path); <ide> } <del> objectStack.push(value); <del> jsonGenerator.writeStartObject(); <del> for (EventFieldMetadata field : nestedType.getFields()) { <del> field.writeField(jsonGenerator, value, objectStack); <del> } <del> jsonGenerator.writeEndObject(); <del> objectStack.pop(); <ide> } <ide> } <ide>
Java
apache-2.0
abbf7adbedd48c7418fb4ea5ff60019c7038e532
0
contentful/contentful-management.java,contentful/contentful-management.java,contentful/contentful-management.java
package com.contentful.java.cma.model; import com.contentful.java.cma.model.RateLimits.DefaultParser; import com.google.gson.GsonBuilder; import java.io.IOException; import java.nio.charset.Charset; import java.util.List; import java.util.Locale; import java.util.Map; import okhttp3.Headers; import okhttp3.Request; import okhttp3.RequestBody; import okhttp3.Response; import okio.Buffer; import static java.lang.String.format; /** * This class will represent known Contentful exceptions */ public class CMAHttpException extends RuntimeException { /** * Error body potentially delivered with an error request. */ public static class ErrorBody { /** * System properties of an error body */ public static class Sys { String type; String id; /** * @return the type, which should be `Error`. */ public String getType() { return type; } /** * @return the id of the error. */ public String getId() { return id; } /** * @return a human readable string, representing the object. */ @Override public String toString() { return "Sys { " + (getId() != null ? "id = " + getId() + ", " : "") + (getType() != null ? "type = " + getType() + " " : "") + "}"; } } /** * Class to describe the error details. */ public static class Details { /** * Which error did actually happen where? */ public static class Error { String name; String details; String type; String filter; Object value; Object path; List<String> expected; /** * @return the name of this error. */ public String getName() { return name; } /** * @return a detailed description of the error. */ public String getDetails() { return details; } /** * @return the type of this error. */ public String getType() { return type; } /** * @return the filter this error produced. */ public String getFilter() { return filter; } /** * @return the value triggering this error. */ public Object getValue() { return value; } /** * @return an invalid type returns an expectation. */ public List<String> getExpected() { return expected; } /** * @return a path contributing to this error. */ public Object getPath() { return path; } /** * @return a human readable string, representing the object. */ @Override public String toString() { return "Error { " + (getDetails() != null ? "details = " + getDetails() + ", " : "") + (getFilter() != null ? "filter = " + getFilter() + ", " : "") + (getName() != null ? "name = " + getName() + ", " : "") + (getPath() != null ? "path = " + getPath() + ", " : "") + (getType() != null ? "type = " + getType() + ", " : "") + (getValue() != null ? "value = " + getValue() + " " : "") + "}"; } } String type; String space; List<Error> errors; List<String> keys; /** * @return the type of this detail. */ public String getType() { return type; } /** * @return the space given to the error. */ public String getSpace() { return space; } /** * @return the list of errors encountered */ public List<Error> getErrors() { return errors; } /** * @return a list of keys contributing to this error. */ public List<String> getKeys() { return keys; } /** * @return a human readable string, representing the object. */ @Override public String toString() { return "Details { " + (getErrors() != null ? "errors = " + getErrors() + ", " : "") + (getSpace() != null ? "space = " + getSpace() + ", " : "") + (getKeys() != null ? "keys = " + getKeys() + ", " : "") + (getType() != null ? "type = " + getType() + " " : "") + "}"; } } Sys sys; String message; String requestId; Details details; /** * @return the sys of this response, containing the error id. 
*/ public Sys getSys() { return sys; } /** * @return the message this error contained. */ public String getMessage() { return message; } /** * @return the request id used to help the Contentful staff. */ public String getRequestId() { return requestId; } /** * @return more information about the error. */ public Details getDetails() { return details; } /** * @return a human readable string, representing the object. */ @Override public String toString() { return "ErrorBody { " + (getDetails() != null ? "details = " + getDetails() + ", " : "") + (getMessage() != null ? "message = " + getMessage() + ", " : "") + (getRequestId() != null ? "requestId = " + getRequestId() + ", " : "") + (getSys() != null ? "sys = " + getSys() + " " : "") + "}"; } } private final Request request; private final Response response; private final RateLimits ratelimits; private ErrorBody errorBody; /** * Construct an error response. * <p> * This constructor will fill the exception with easy accessible values, like * {@link #responseCode()}. {@link #responseMessage()}, but also * {@link #rateLimitReset()}. * * @param request the request issuing the error. * @param response the response from the server to this faulty request. */ public CMAHttpException(Request request, Response response) { this.request = request; this.response = response; try { final String body = response.body() != null ? response.body().string() : null; this.errorBody = new GsonBuilder().create().fromJson(body, ErrorBody.class); } catch (IOException e) { this.errorBody = null; } final Map<String, List<String>> headers = response.headers().toMultimap(); this.ratelimits = new DefaultParser().parse(headers); } /** * Convert exception to human readable form. * * @return a string representing this exception. */ @Override public String toString() { if (errorBody == null) { return format( Locale.getDefault(), "FAILED \n\t%s\n\t↳ Header{%s}%s\n\t%s\n\t↳ Header{%s}", request.toString(), headersToString(request.headers()), maybeBodyToString(request.body()), response.toString(), headersToString(response.headers())); } else { return format( Locale.getDefault(), "FAILED %s\n\t%s\n\t↳ Header{%s}%s\n\t%s\n\t↳ Header{%s}", errorBody.toString(), request.toString(), headersToString(request.headers()), maybeBodyToString(request.body()), response.toString(), headersToString(response.headers())); } } private String maybeBodyToString(RequestBody body) { if (body != null) { final Buffer sink = new Buffer(); try { body.writeTo(sink); final String bodyContent = sink.readString(Charset.defaultCharset()); return "\n\t↳ Body " + bodyContent; } catch (IOException e) { return ""; } } else { return ""; } } /** * @return the response code of the request. */ public int responseCode() { return response.code(); } /** * @return the message the server returned. 
*/ public String responseMessage() { return response.message(); } /** * @return the hourly rate limit or -1 if header not send */ public int rateLimitHourLimit() { return ratelimits.getHourLimit(); } /** * @return the number of remaining requests that can be made in the hour or -1 if header not send */ public int rateLimitHourRemaining() { return ratelimits.getHourRemaining(); } /** * @return the per second rate limit or -1 if header not send */ public int rateLimitSecondLimit() { return ratelimits.getSecondLimit(); } /** * @return the number of remaining requests that can be made per second or -1 if header not send */ public int rateLimitSecondRemaining() { return ratelimits.getSecondRemaining(); } /** * @return the number of seconds until the user can make a next request or -1 if header not send */ public int rateLimitReset() { return ratelimits.getReset(); } /** * @return a modeled error body response. */ public ErrorBody getErrorBody() { return errorBody; } private String headersToString(Headers headers) { final StringBuilder builder = new StringBuilder(); String divider = ""; for (final String name : headers.names()) { final String value = headers.get(name); builder.append(divider); builder.append(name); builder.append(": "); builder.append(value); if ("".equals(divider)) { divider = ", "; } } return builder.toString(); } }
src/main/java/com/contentful/java/cma/model/CMAHttpException.java
package com.contentful.java.cma.model; import com.contentful.java.cma.model.RateLimits.DefaultParser; import com.google.gson.GsonBuilder; import java.io.IOException; import java.util.List; import java.util.Locale; import java.util.Map; import okhttp3.Headers; import okhttp3.Request; import okhttp3.Response; import static java.lang.String.format; /** * This class will represent known Contentful exceptions */ public class CMAHttpException extends RuntimeException { /** * Error body potentially delivered with an error request. */ public static class ErrorBody { /** * System properties of an error body */ public static class Sys { String type; String id; /** * @return the type, which should be `Error`. */ public String getType() { return type; } /** * @return the id of the error. */ public String getId() { return id; } /** * @return a human readable string, representing the object. */ @Override public String toString() { return "Sys { " + (getId() != null ? "id = " + getId() + ", " : "") + (getType() != null ? "type = " + getType() + " " : "") + "}"; } } /** * Class to describe the error details. */ public static class Details { /** * Which error did actually happen where? */ public static class Error { String name; String details; String type; String filter; Object value; Object path; List<String> expected; /** * @return the name of this error. */ public String getName() { return name; } /** * @return a detailed description of the error. */ public String getDetails() { return details; } /** * @return the type of this error. */ public String getType() { return type; } /** * @return the filter this error produced. */ public String getFilter() { return filter; } /** * @return the value triggering this error. */ public Object getValue() { return value; } /** * @return an invalid type returns an expectation. */ public List<String> getExpected() { return expected; } /** * @return a path contributing to this error. */ public Object getPath() { return path; } /** * @return a human readable string, representing the object. */ @Override public String toString() { return "Error { " + (getDetails() != null ? "details = " + getDetails() + ", " : "") + (getFilter() != null ? "filter = " + getFilter() + ", " : "") + (getName() != null ? "name = " + getName() + ", " : "") + (getPath() != null ? "path = " + getPath() + ", " : "") + (getType() != null ? "type = " + getType() + ", " : "") + (getValue() != null ? "value = " + getValue() + " " : "") + "}"; } } String type; String space; List<Error> errors; List<String> keys; /** * @return the type of this detail. */ public String getType() { return type; } /** * @return the space given to the error. */ public String getSpace() { return space; } /** * @return the list of errors encountered */ public List<Error> getErrors() { return errors; } /** * @return a list of keys contributing to this error. */ public List<String> getKeys() { return keys; } /** * @return a human readable string, representing the object. */ @Override public String toString() { return "Details { " + (getErrors() != null ? "errors = " + getErrors() + ", " : "") + (getSpace() != null ? "space = " + getSpace() + ", " : "") + (getKeys() != null ? "keys = " + getKeys() + ", " : "") + (getType() != null ? "type = " + getType() + " " : "") + "}"; } } Sys sys; String message; String requestId; Details details; /** * @return the sys of this response, containing the error id. */ public Sys getSys() { return sys; } /** * @return the message this error contained. 
*/ public String getMessage() { return message; } /** * @return the request id used to help the Contentful staff. */ public String getRequestId() { return requestId; } /** * @return more information about the error. */ public Details getDetails() { return details; } /** * @return a human readable string, representing the object. */ @Override public String toString() { return "ErrorBody { " + (getDetails() != null ? "details = " + getDetails() + ", " : "") + (getMessage() != null ? "message = " + getMessage() + ", " : "") + (getRequestId() != null ? "requestId = " + getRequestId() + ", " : "") + (getSys() != null ? "sys = " + getSys() + " " : "") + "}"; } } private final Request request; private final Response response; private final RateLimits ratelimits; private ErrorBody errorBody; /** * Construct an error response. * <p> * This constructor will fill the exception with easy accessible values, like * {@link #responseCode()}. {@link #responseMessage()}, but also * {@link #rateLimitReset()}. * * @param request the request issuing the error. * @param response the response from the server to this faulty request. */ public CMAHttpException(Request request, Response response) { this.request = request; this.response = response; try { final String body = response.body() != null ? response.body().string() : null; this.errorBody = new GsonBuilder().create().fromJson(body, ErrorBody.class); } catch (IOException e) { this.errorBody = null; } final Map<String, List<String>> headers = response.headers().toMultimap(); this.ratelimits = new DefaultParser().parse(headers); } /** * Convert exception to human readable form. * * @return a string representing this exception. */ @Override public String toString() { if (errorBody == null) { return format( Locale.getDefault(), "FAILED \n\t%s\n\t↳ Header{%s}\n\t%s\n\t↳ Header{%s}", request.toString(), headersToString(request.headers()), response.toString(), headersToString(response.headers())); } else { return format( Locale.getDefault(), "FAILED %s\n\t%s\n\t↳ Header{%s}\n\t%s\n\t↳ Header{%s}", errorBody.toString(), request.toString(), headersToString(request.headers()), response.toString(), headersToString(response.headers())); } } /** * @return the response code of the request. */ public int responseCode() { return response.code(); } /** * @return the message the server returned. */ public String responseMessage() { return response.message(); } /** * @return the hourly rate limit or -1 if header not send */ public int rateLimitHourLimit() { return ratelimits.getHourLimit(); } /** * @return the number of remaining requests that can be made in the hour or -1 if header not send */ public int rateLimitHourRemaining() { return ratelimits.getHourRemaining(); } /** * @return the per second rate limit or -1 if header not send */ public int rateLimitSecondLimit() { return ratelimits.getSecondLimit(); } /** * @return the number of remaining requests that can be made per second or -1 if header not send */ public int rateLimitSecondRemaining() { return ratelimits.getSecondRemaining(); } /** * @return the number of seconds until the user can make a next request or -1 if header not send */ public int rateLimitReset() { return ratelimits.getReset(); } /** * @return a modeled error body response. 
*/ public ErrorBody getErrorBody() { return errorBody; } private String headersToString(Headers headers) { final StringBuilder builder = new StringBuilder(); String divider = ""; for (final String name : headers.names()) { final String value = headers.get(name); builder.append(divider); builder.append(name); builder.append(": "); builder.append(value); if ("".equals(divider)) { divider = ", "; } } return builder.toString(); } }
display request body in errors
src/main/java/com/contentful/java/cma/model/CMAHttpException.java
display request body in errors
<ide><path>rc/main/java/com/contentful/java/cma/model/CMAHttpException.java <ide> import com.google.gson.GsonBuilder; <ide> <ide> import java.io.IOException; <add>import java.nio.charset.Charset; <ide> import java.util.List; <ide> import java.util.Locale; <ide> import java.util.Map; <ide> <ide> import okhttp3.Headers; <ide> import okhttp3.Request; <add>import okhttp3.RequestBody; <ide> import okhttp3.Response; <add>import okio.Buffer; <ide> <ide> import static java.lang.String.format; <ide> <ide> if (errorBody == null) { <ide> return format( <ide> Locale.getDefault(), <del> "FAILED \n\t%s\n\t↳ Header{%s}\n\t%s\n\t↳ Header{%s}", <add> "FAILED \n\t%s\n\t↳ Header{%s}%s\n\t%s\n\t↳ Header{%s}", <ide> request.toString(), <ide> headersToString(request.headers()), <add> maybeBodyToString(request.body()), <ide> response.toString(), <ide> headersToString(response.headers())); <ide> } else { <ide> return format( <ide> Locale.getDefault(), <del> "FAILED %s\n\t%s\n\t↳ Header{%s}\n\t%s\n\t↳ Header{%s}", <add> "FAILED %s\n\t%s\n\t↳ Header{%s}%s\n\t%s\n\t↳ Header{%s}", <ide> errorBody.toString(), <ide> request.toString(), <ide> headersToString(request.headers()), <add> maybeBodyToString(request.body()), <ide> response.toString(), <ide> headersToString(response.headers())); <add> } <add> } <add> <add> private String maybeBodyToString(RequestBody body) { <add> if (body != null) { <add> final Buffer sink = new Buffer(); <add> try { <add> body.writeTo(sink); <add> final String bodyContent = sink.readString(Charset.defaultCharset()); <add> <add> return "\n\t↳ Body " + bodyContent; <add> } catch (IOException e) { <add> return ""; <add> } <add> } else { <add> return ""; <ide> } <ide> } <ide>
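The diff above adds a maybeBodyToString() helper that serializes the okhttp request body through an okio Buffer so the body can be shown in the error output. Below is a minimal, self-contained sketch of that same okio technique; the class name and the sample JSON payload are illustrative and are not part of the repository.

import java.io.IOException;
import java.nio.charset.Charset;

import okhttp3.MediaType;
import okhttp3.RequestBody;
import okio.Buffer;

public final class RequestBodyPrinter {

    // Serialize an okhttp RequestBody into an in-memory okio Buffer and read it back
    // as text -- the same approach the commit's maybeBodyToString() helper takes.
    static String bodyToString(RequestBody body) {
        if (body == null) {
            return ""; // body-less requests (e.g. plain GETs) have nothing to show
        }
        final Buffer sink = new Buffer();
        try {
            body.writeTo(sink);
            return sink.readString(Charset.defaultCharset());
        } catch (IOException e) {
            return ""; // mirror the commit: swallow the error and omit the body
        }
    }

    public static void main(String[] args) {
        RequestBody body = RequestBody.create(
                MediaType.parse("application/json; charset=utf-8"),
                "{\"name\":\"example entry\"}"); // made-up payload for the demo
        System.out.println(bodyToString(body));
    }
}

Writing the body into an in-memory Buffer is a common way to inspect an okhttp RequestBody, since the body type exposes no direct string accessor of its content.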
JavaScript
mit
3352de56c556bcf5c20803277d88c49f34609721
0
fashionsun/jquery-placeholder,SimenB/jquery-placeholder,mathiasbynens/jquery-placeholder,jeremy-hawes/jquery-placeholder,zsh2938/jquery-placeholder,7213/jquery-placeholder,mlms13/jquery-placeholder,vslinko-forks/jquery-placeholder
/*! * HTML5 Placeholder jQuery Plugin v1.7 * @link http://github.com/mathiasbynens/Placeholder-jQuery-Plugin * @author Mathias Bynens <http://mathiasbynens.be/> */ (function(f,z){var e=z in document.createElement('input'),a=z in document.createElement('textarea');if(e&&a){f.fn.placeholder=function(){return this}}else{f.fn.placeholder=function(){return this.filter((e?'textarea':':input')+'['+z+']').bind('focus.'+z,b).bind('blur.'+z,d).trigger('blur.'+z).end()}}function c(h){var g={},i=/^jQuery\d+$/;f.each(h.attributes,function(k,j){if(j.specified&&!i.test(j.name)){g[j.name]=j.value}});return g}function b(){var g=f(this);if(g.val()===g.attr(z)&&g.hasClass(z)){if(g.data(z+'-password')){g.hide().next().show().focus()}else{g.val('').removeClass(z)}}}function d(g){var j,i=f(this);if(i.val()===''||i.val()===i.attr(z)){if(i.is(':password')){if(!i.data(z+'-textinput')){try{j=i.clone().attr({type:'text'})}catch(h){j=f('<input>').attr(f.extend(c(i[0]),{type:'text'}))}j.removeAttr('name').data(z+'-password',true).bind('focus.'+z,b);i.data(z+'-textinput',j).before(j)}i=i.hide().prev().show()}i.addClass(z).val(i.attr(z))}else{i.removeClass(z)}}f(function(){f('form').bind('submit.'+z,function(){var g=f('.'+z,this).each(b);setTimeout(function(){g.each(d)},10)})});f(window).bind('unload.'+z,function(){f('.'+z).val('')})})(jQuery,'placeholder');
jquery.placeholder.min.js
/*! * HTML5 Placeholder jQuery Plugin v1.7 * @link http://github.com/mathiasbynens/Placeholder-jQuery-Plugin * @author Mathias Bynens <http://mathiasbynens.be/> */ (function(f,z){var e=z in document.createElement('input'),a=z in document.createElement('textarea');if(e&&a){f.fn.placeholder=function(){return this}}else{f.fn.placeholder=function(){return this.filter((e?'textarea':':input')+'['+z+']').bind('focus.'+z,b).bind('blur.'+z,d).trigger('blur.'+z).end()}}function c(h){var g={},i=/^jQuery\d+$/;f.each(h.attributes,function(k,j){if(i.specified&&!i.test(j.name)){g[j.name]=j.value}});return g}function b(){var g=f(this);if(g.val()===g.attr(z)&&g.hasClass(z)){if(g.data(z+'-password')){g.hide().next().show().focus()}else{g.val('').removeClass(z)}}}function d(g){var j,i=f(this);if(i.val()===''||i.val()===i.attr(z)){if(i.is(':password')){if(!i.data(z+'-textinput')){try{j=i.clone().attr({type:'text'})}catch(h){j=f('<input>').attr(f.extend(c(i[0]),{type:'text'}))}j.removeAttr('name').data(z+'-password',true).bind('focus.'+z,b);i.data(z+'-textinput',j).before(j)}i=i.hide().prev().show()}i.addClass(z).val(i.attr(z))}else{i.removeClass(z)}}f(function(){f('form').bind('submit.'+z,function(){var g=f('.'+z,this).each(b);setTimeout(function(){g.each(d)},10)})});f(window).bind('unload.'+z,function(){f('.'+z).val('')})})(jQuery,'placeholder');
Update minified version
jquery.placeholder.min.js
Update minified version
<ide><path>query.placeholder.min.js <ide> * @link http://github.com/mathiasbynens/Placeholder-jQuery-Plugin <ide> * @author Mathias Bynens <http://mathiasbynens.be/> <ide> */ <del>(function(f,z){var e=z in document.createElement('input'),a=z in document.createElement('textarea');if(e&&a){f.fn.placeholder=function(){return this}}else{f.fn.placeholder=function(){return this.filter((e?'textarea':':input')+'['+z+']').bind('focus.'+z,b).bind('blur.'+z,d).trigger('blur.'+z).end()}}function c(h){var g={},i=/^jQuery\d+$/;f.each(h.attributes,function(k,j){if(i.specified&&!i.test(j.name)){g[j.name]=j.value}});return g}function b(){var g=f(this);if(g.val()===g.attr(z)&&g.hasClass(z)){if(g.data(z+'-password')){g.hide().next().show().focus()}else{g.val('').removeClass(z)}}}function d(g){var j,i=f(this);if(i.val()===''||i.val()===i.attr(z)){if(i.is(':password')){if(!i.data(z+'-textinput')){try{j=i.clone().attr({type:'text'})}catch(h){j=f('<input>').attr(f.extend(c(i[0]),{type:'text'}))}j.removeAttr('name').data(z+'-password',true).bind('focus.'+z,b);i.data(z+'-textinput',j).before(j)}i=i.hide().prev().show()}i.addClass(z).val(i.attr(z))}else{i.removeClass(z)}}f(function(){f('form').bind('submit.'+z,function(){var g=f('.'+z,this).each(b);setTimeout(function(){g.each(d)},10)})});f(window).bind('unload.'+z,function(){f('.'+z).val('')})})(jQuery,'placeholder'); <add>(function(f,z){var e=z in document.createElement('input'),a=z in document.createElement('textarea');if(e&&a){f.fn.placeholder=function(){return this}}else{f.fn.placeholder=function(){return this.filter((e?'textarea':':input')+'['+z+']').bind('focus.'+z,b).bind('blur.'+z,d).trigger('blur.'+z).end()}}function c(h){var g={},i=/^jQuery\d+$/;f.each(h.attributes,function(k,j){if(j.specified&&!i.test(j.name)){g[j.name]=j.value}});return g}function b(){var g=f(this);if(g.val()===g.attr(z)&&g.hasClass(z)){if(g.data(z+'-password')){g.hide().next().show().focus()}else{g.val('').removeClass(z)}}}function d(g){var j,i=f(this);if(i.val()===''||i.val()===i.attr(z)){if(i.is(':password')){if(!i.data(z+'-textinput')){try{j=i.clone().attr({type:'text'})}catch(h){j=f('<input>').attr(f.extend(c(i[0]),{type:'text'}))}j.removeAttr('name').data(z+'-password',true).bind('focus.'+z,b);i.data(z+'-textinput',j).before(j)}i=i.hide().prev().show()}i.addClass(z).val(i.attr(z))}else{i.removeClass(z)}}f(function(){f('form').bind('submit.'+z,function(){var g=f('.'+z,this).each(b);setTimeout(function(){g.each(d)},10)})});f(window).bind('unload.'+z,function(){f('.'+z).val('')})})(jQuery,'placeholder');
Java
apache-2.0
158d05fb86f7203701e91d2046c90a1aa91c556d
0
hidroh/materialistic
/* * Copyright (c) 2015 Ha Duy Trung * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.github.hidroh.materialistic.data; import android.content.ContentResolver; import android.content.Context; import android.support.annotation.NonNull; import java.io.IOException; import javax.inject.Inject; import retrofit2.Call; import retrofit2.http.GET; import retrofit2.http.Headers; import retrofit2.http.Path; import rx.Observable; import rx.Scheduler; import rx.android.schedulers.AndroidSchedulers; /** * Client to retrieve Hacker News content asynchronously */ public class HackerNewsClient implements ItemManager, UserManager { public static final String HOST = "hacker-news.firebaseio.com"; public static final String BASE_WEB_URL = "https://news.ycombinator.com"; public static final String WEB_ITEM_PATH = BASE_WEB_URL + "/item?id=%s"; static final String BASE_API_URL = "https://" + HOST + "/v0/"; private final RestService mRestService; private final SessionManager mSessionManager; private final FavoriteManager mFavoriteManager; private final ContentResolver mContentResolver; private Scheduler mIoScheduler; @Inject public HackerNewsClient(Context context, RestServiceFactory factory, SessionManager sessionManager, FavoriteManager favoriteManager, Scheduler ioScheduler) { mRestService = factory.rxEnabled(true).create(BASE_API_URL, RestService.class); mSessionManager = sessionManager; mFavoriteManager = favoriteManager; mContentResolver = context.getApplicationContext().getContentResolver(); mIoScheduler = ioScheduler; } @Override public void getStories(@FetchMode String filter, @CacheMode int cacheMode, final ResponseListener<Item[]> listener) { if (listener == null) { return; } Observable.defer(() -> getStoriesObservable(filter, cacheMode)) .subscribeOn(mIoScheduler) .observeOn(AndroidSchedulers.mainThread()) .subscribe(listener::onResponse, t -> listener.onError(t != null ? t.getMessage() : "")); } @Override public void getItem(final String itemId, @CacheMode int cacheMode, ResponseListener<Item> listener) { if (listener == null) { return; } Observable<HackerNewsItem> itemObservable; switch (cacheMode) { case MODE_DEFAULT: default: itemObservable = mRestService.itemRx(itemId); break; case MODE_NETWORK: itemObservable = mRestService.networkItemRx(itemId); break; case MODE_CACHE: itemObservable = mRestService.cachedItemRx(itemId) .onErrorResumeNext(mRestService.itemRx(itemId)); break; } Observable.defer(() -> Observable.zip( mSessionManager.isViewed(mContentResolver, itemId), mFavoriteManager.check(mContentResolver, itemId), itemObservable, (isViewed, favorite, hackerNewsItem) -> { if (hackerNewsItem != null) { hackerNewsItem.preload(); hackerNewsItem.setIsViewed(isViewed); hackerNewsItem.setFavorite(favorite); } return hackerNewsItem; })) .subscribeOn(mIoScheduler) .observeOn(AndroidSchedulers.mainThread()) .subscribe(listener::onResponse, t -> listener.onError(t != null ? 
t.getMessage() : "")); } @Override public Item[] getStories(String filter, @CacheMode int cacheMode) { try { return toItems(getStoriesCall(filter, cacheMode).execute().body()); } catch (IOException e) { return new Item[0]; } } @Override public Item getItem(String itemId, @CacheMode int cacheMode) { Call<HackerNewsItem> call; switch (cacheMode) { case MODE_DEFAULT: case MODE_CACHE: default: call = mRestService.item(itemId); break; case MODE_NETWORK: call = mRestService.networkItem(itemId); break; } try { return call.execute().body(); } catch (IOException e) { return null; } } @Override public void getUser(String username, final ResponseListener<User> listener) { if (listener == null) { return; } mRestService.userRx(username) .map(userItem -> { if (userItem != null) { userItem.setSubmittedItems(toItems(userItem.getSubmitted())); } return userItem; }) .subscribeOn(mIoScheduler) .observeOn(AndroidSchedulers.mainThread()) .subscribe(listener::onResponse, t -> listener.onError(t != null ? t.getMessage() : "")); } @NonNull private Observable<Item[]> getStoriesObservable(@FetchMode String filter, @CacheMode int cacheMode) { Observable<int[]> observable; switch (filter) { case NEW_FETCH_MODE: observable = cacheMode == MODE_NETWORK ? mRestService.networkNewStoriesRx() : mRestService.newStoriesRx(); break; case SHOW_FETCH_MODE: observable = cacheMode == MODE_NETWORK ? mRestService.networkShowStoriesRx() : mRestService.showStoriesRx(); break; case ASK_FETCH_MODE: observable = cacheMode == MODE_NETWORK ? mRestService.networkAskStoriesRx() : mRestService.askStoriesRx(); break; case JOBS_FETCH_MODE: observable = cacheMode == MODE_NETWORK ? mRestService.networkJobStoriesRx() : mRestService.jobStoriesRx(); break; case BEST_FETCH_MODE: observable = cacheMode == MODE_NETWORK ? mRestService.networkBestStoriesRx() : mRestService.bestStoriesRx(); break; default: observable = cacheMode == MODE_NETWORK ? mRestService.networkTopStoriesRx() : mRestService.topStoriesRx(); break; } return observable.map(this::toItems); } @NonNull private Call<int[]> getStoriesCall(@FetchMode String filter, @CacheMode int cacheMode) { Call<int[]> call; if (filter == null) { // for legacy 'new stories' widgets return cacheMode == MODE_NETWORK ? mRestService.networkNewStories() : mRestService.newStories(); } switch (filter) { case NEW_FETCH_MODE: call = cacheMode == MODE_NETWORK ? mRestService.networkNewStories() : mRestService.newStories(); break; case SHOW_FETCH_MODE: call = cacheMode == MODE_NETWORK ? mRestService.networkShowStories() : mRestService.showStories(); break; case ASK_FETCH_MODE: call = cacheMode == MODE_NETWORK ? mRestService.networkAskStories() : mRestService.askStories(); break; case JOBS_FETCH_MODE: call = cacheMode == MODE_NETWORK ? mRestService.networkJobStories() : mRestService.jobStories(); break; case BEST_FETCH_MODE: call = cacheMode == MODE_NETWORK ? mRestService.networkBestStories() : mRestService.bestStories(); break; default: call = cacheMode == MODE_NETWORK ? 
mRestService.networkTopStories() : mRestService.topStories(); break; } return call; } private HackerNewsItem[] toItems(int[] ids) { if (ids == null) { return null; } HackerNewsItem[] items = new HackerNewsItem[ids.length]; for (int i = 0; i < items.length; i++) { HackerNewsItem item = new HackerNewsItem(ids[i]); item.rank = i + 1; items[i] = item; } return items; } interface RestService { @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("topstories.json") Observable<int[]> topStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("newstories.json") Observable<int[]> newStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("showstories.json") Observable<int[]> showStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("askstories.json") Observable<int[]> askStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("jobstories.json") Observable<int[]> jobStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("beststories.json") Observable<int[]> bestStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("topstories.json") Observable<int[]> networkTopStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("newstories.json") Observable<int[]> networkNewStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("showstories.json") Observable<int[]> networkShowStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("askstories.json") Observable<int[]> networkAskStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("jobstories.json") Observable<int[]> networkJobStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("beststories.json") Observable<int[]> networkBestStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("item/{itemId}.json") Observable<HackerNewsItem> itemRx(@Path("itemId") String itemId); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("item/{itemId}.json") Observable<HackerNewsItem> networkItemRx(@Path("itemId") String itemId); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_CACHE) @GET("item/{itemId}.json") Observable<HackerNewsItem> cachedItemRx(@Path("itemId") String itemId); @GET("user/{userId}.json") Observable<UserItem> userRx(@Path("userId") String userId); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("topstories.json") Call<int[]> topStories(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("newstories.json") Call<int[]> newStories(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("showstories.json") Call<int[]> showStories(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("askstories.json") Call<int[]> askStories(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("jobstories.json") Call<int[]> jobStories(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("beststories.json") Call<int[]> bestStories(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("topstories.json") Call<int[]> networkTopStories(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("newstories.json") Call<int[]> networkNewStories(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("showstories.json") Call<int[]> networkShowStories(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("askstories.json") Call<int[]> networkAskStories(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("jobstories.json") Call<int[]> 
networkJobStories(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("beststories.json") Call<int[]> networkBestStories(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("item/{itemId}.json") Call<HackerNewsItem> item(@Path("itemId") String itemId); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("item/{itemId}.json") Call<HackerNewsItem> networkItem(@Path("itemId") String itemId); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_CACHE) @GET("item/{itemId}.json") Call<HackerNewsItem> cachedItem(@Path("itemId") String itemId); @GET("user/{userId}.json") Call<UserItem> user(@Path("userId") String userId); } }
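In the getItem() path of the file above, the client zips three Rx sources — the viewed flag, the favorite check, and the item fetch itself — into a single decorated item before notifying the listener. The following is a minimal RxJava 1 sketch of that zip step; the class name and the sample values are invented for illustration.

import rx.Observable;

public final class ZipSketch {
    public static void main(String[] args) {
        // Stand-ins for the three sources the client combines; the values are made up.
        Observable<Boolean> viewed = Observable.just(true);
        Observable<Boolean> favorite = Observable.just(false);
        Observable<String> item = Observable.just("item 8863");

        // zip() waits for one emission from each source and merges them into one result,
        // which is how the fetched item gets decorated with its viewed/favorite flags.
        Observable.zip(viewed, favorite, item,
                (isViewed, isFavorite, title) ->
                        title + " [viewed=" + isViewed + ", favorite=" + isFavorite + "]")
                .subscribe(System.out::println);
    }
}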
app/src/main/java/io/github/hidroh/materialistic/data/HackerNewsClient.java
/* * Copyright (c) 2015 Ha Duy Trung * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.github.hidroh.materialistic.data; import android.content.ContentResolver; import android.content.Context; import android.support.annotation.NonNull; import java.io.IOException; import javax.inject.Inject; import retrofit2.Call; import retrofit2.http.GET; import retrofit2.http.Headers; import retrofit2.http.Path; import rx.Observable; import rx.Scheduler; import rx.android.schedulers.AndroidSchedulers; /** * Client to retrieve Hacker News content asynchronously */ public class HackerNewsClient implements ItemManager, UserManager { public static final String HOST = "hacker-news.firebaseio.com"; public static final String BASE_WEB_URL = "https://news.ycombinator.com"; public static final String WEB_ITEM_PATH = BASE_WEB_URL + "/item?id=%s"; static final String BASE_API_URL = "https://" + HOST + "/v0/"; private final RestService mRestService; private final SessionManager mSessionManager; private final FavoriteManager mFavoriteManager; private final ContentResolver mContentResolver; private Scheduler mIoScheduler; @Inject public HackerNewsClient(Context context, RestServiceFactory factory, SessionManager sessionManager, FavoriteManager favoriteManager, Scheduler ioScheduler) { mRestService = factory.rxEnabled(true).create(BASE_API_URL, RestService.class); mSessionManager = sessionManager; mFavoriteManager = favoriteManager; mContentResolver = context.getApplicationContext().getContentResolver(); mIoScheduler = ioScheduler; } @Override public void getStories(@FetchMode String filter, @CacheMode int cacheMode, final ResponseListener<Item[]> listener) { if (listener == null) { return; } Observable.defer(() -> getStoriesObservable(filter, cacheMode)) .subscribeOn(mIoScheduler) .observeOn(AndroidSchedulers.mainThread()) .subscribe(listener::onResponse, t -> listener.onError(t != null ? t.getMessage() : "")); } @Override public void getItem(final String itemId, @CacheMode int cacheMode, ResponseListener<Item> listener) { if (listener == null) { return; } Observable<HackerNewsItem> itemObservable; switch (cacheMode) { case MODE_DEFAULT: default: itemObservable = mRestService.itemRx(itemId); break; case MODE_NETWORK: itemObservable = mRestService.networkItemRx(itemId); break; case MODE_CACHE: itemObservable = mRestService.cachedItemRx(itemId) .onErrorResumeNext(mRestService.itemRx(itemId)); break; } Observable.defer(() -> Observable.zip( mSessionManager.isViewed(mContentResolver, itemId), mFavoriteManager.check(mContentResolver, itemId), itemObservable, (isViewed, favorite, hackerNewsItem) -> { if (hackerNewsItem != null) { hackerNewsItem.preload(); hackerNewsItem.setIsViewed(isViewed); hackerNewsItem.setFavorite(favorite); } return hackerNewsItem; })) .subscribeOn(mIoScheduler) .observeOn(AndroidSchedulers.mainThread()) .subscribe(listener::onResponse, t -> listener.onError(t != null ? 
t.getMessage() : "")); } @Override public Item[] getStories(String filter, @CacheMode int cacheMode) { try { return toItems(getStoriesCall(filter, cacheMode).execute().body()); } catch (IOException e) { return new Item[0]; } } @Override public Item getItem(String itemId, @CacheMode int cacheMode) { Call<HackerNewsItem> call; switch (cacheMode) { case MODE_DEFAULT: case MODE_CACHE: default: call = mRestService.item(itemId); break; case MODE_NETWORK: call = mRestService.networkItem(itemId); break; } try { return call.execute().body(); } catch (IOException e) { return null; } } @Override public void getUser(String username, final ResponseListener<User> listener) { if (listener == null) { return; } mRestService.userRx(username) .map(userItem -> { if (userItem != null) { userItem.setSubmittedItems(toItems(userItem.getSubmitted())); } return userItem; }) .subscribeOn(mIoScheduler) .observeOn(AndroidSchedulers.mainThread()) .subscribe(listener::onResponse, t -> listener.onError(t != null ? t.getMessage() : "")); } @NonNull private Observable<Item[]> getStoriesObservable(@FetchMode String filter, @CacheMode int cacheMode) { Observable<int[]> observable; switch (filter) { case NEW_FETCH_MODE: observable = cacheMode == MODE_NETWORK ? mRestService.networkNewStoriesRx() : mRestService.newStoriesRx(); break; case SHOW_FETCH_MODE: observable = cacheMode == MODE_NETWORK ? mRestService.networkShowStoriesRx() : mRestService.showStoriesRx(); break; case ASK_FETCH_MODE: observable = cacheMode == MODE_NETWORK ? mRestService.networkAskStoriesRx() : mRestService.askStoriesRx(); break; case JOBS_FETCH_MODE: observable = cacheMode == MODE_NETWORK ? mRestService.networkJobStoriesRx() : mRestService.jobStoriesRx(); break; case BEST_FETCH_MODE: observable = cacheMode == MODE_NETWORK ? mRestService.networkBestStoriesRx() : mRestService.bestStoriesRx(); break; default: observable = cacheMode == MODE_NETWORK ? mRestService.networkTopStoriesRx() : mRestService.topStoriesRx(); break; } return observable.map(this::toItems); } @NonNull private Call<int[]> getStoriesCall(@FetchMode String filter, @CacheMode int cacheMode) { Call<int[]> call; switch (filter) { case NEW_FETCH_MODE: call = cacheMode == MODE_NETWORK ? mRestService.networkNewStories() : mRestService.newStories(); break; case SHOW_FETCH_MODE: call = cacheMode == MODE_NETWORK ? mRestService.networkShowStories() : mRestService.showStories(); break; case ASK_FETCH_MODE: call = cacheMode == MODE_NETWORK ? mRestService.networkAskStories() : mRestService.askStories(); break; case JOBS_FETCH_MODE: call = cacheMode == MODE_NETWORK ? mRestService.networkJobStories() : mRestService.jobStories(); break; case BEST_FETCH_MODE: call = cacheMode == MODE_NETWORK ? mRestService.networkBestStories() : mRestService.bestStories(); break; default: call = cacheMode == MODE_NETWORK ? 
mRestService.networkTopStories() : mRestService.topStories(); break; } return call; } private HackerNewsItem[] toItems(int[] ids) { if (ids == null) { return null; } HackerNewsItem[] items = new HackerNewsItem[ids.length]; for (int i = 0; i < items.length; i++) { HackerNewsItem item = new HackerNewsItem(ids[i]); item.rank = i + 1; items[i] = item; } return items; } interface RestService { @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("topstories.json") Observable<int[]> topStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("newstories.json") Observable<int[]> newStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("showstories.json") Observable<int[]> showStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("askstories.json") Observable<int[]> askStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("jobstories.json") Observable<int[]> jobStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("beststories.json") Observable<int[]> bestStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("topstories.json") Observable<int[]> networkTopStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("newstories.json") Observable<int[]> networkNewStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("showstories.json") Observable<int[]> networkShowStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("askstories.json") Observable<int[]> networkAskStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("jobstories.json") Observable<int[]> networkJobStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("beststories.json") Observable<int[]> networkBestStoriesRx(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("item/{itemId}.json") Observable<HackerNewsItem> itemRx(@Path("itemId") String itemId); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("item/{itemId}.json") Observable<HackerNewsItem> networkItemRx(@Path("itemId") String itemId); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_CACHE) @GET("item/{itemId}.json") Observable<HackerNewsItem> cachedItemRx(@Path("itemId") String itemId); @GET("user/{userId}.json") Observable<UserItem> userRx(@Path("userId") String userId); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("topstories.json") Call<int[]> topStories(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("newstories.json") Call<int[]> newStories(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("showstories.json") Call<int[]> showStories(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("askstories.json") Call<int[]> askStories(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("jobstories.json") Call<int[]> jobStories(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("beststories.json") Call<int[]> bestStories(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("topstories.json") Call<int[]> networkTopStories(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("newstories.json") Call<int[]> networkNewStories(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("showstories.json") Call<int[]> networkShowStories(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("askstories.json") Call<int[]> networkAskStories(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("jobstories.json") Call<int[]> 
networkJobStories(); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("beststories.json") Call<int[]> networkBestStories(); @Headers(RestServiceFactory.CACHE_CONTROL_MAX_AGE_30M) @GET("item/{itemId}.json") Call<HackerNewsItem> item(@Path("itemId") String itemId); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_NETWORK) @GET("item/{itemId}.json") Call<HackerNewsItem> networkItem(@Path("itemId") String itemId); @Headers(RestServiceFactory.CACHE_CONTROL_FORCE_CACHE) @GET("item/{itemId}.json") Call<HackerNewsItem> cachedItem(@Path("itemId") String itemId); @GET("user/{userId}.json") Call<UserItem> user(@Path("userId") String userId); } }
Fix legacy widget
app/src/main/java/io/github/hidroh/materialistic/data/HackerNewsClient.java
Fix legacy widget
<ide><path>pp/src/main/java/io/github/hidroh/materialistic/data/HackerNewsClient.java <ide> @NonNull <ide> private Call<int[]> getStoriesCall(@FetchMode String filter, @CacheMode int cacheMode) { <ide> Call<int[]> call; <add> if (filter == null) { <add> // for legacy 'new stories' widgets <add> return cacheMode == MODE_NETWORK ? <add> mRestService.networkNewStories() : mRestService.newStories(); <add> } <ide> switch (filter) { <ide> case NEW_FETCH_MODE: <ide> call = cacheMode == MODE_NETWORK ?
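The diff adds a null guard before the switch on the filter string: legacy 'new stories' widgets call getStoriesCall() without a filter, and switching on a null String throws a NullPointerException. A minimal sketch of that guard pattern follows; the filter literals and endpoint names are illustrative, not the constants used in the repository.

public final class NullFilterGuard {

    // Switching on a null String throws NullPointerException, so callers that may pass
    // no filter need an explicit guard first -- the pattern the diff adds for legacy widgets.
    static String endpointFor(String filter) {
        if (filter == null) {
            return "newstories.json"; // legacy 'new stories' widgets pass no filter
        }
        switch (filter) {
            case "top":
                return "topstories.json";
            case "best":
                return "beststories.json";
            default:
                return "newstories.json";
        }
    }

    public static void main(String[] args) {
        System.out.println(endpointFor(null));  // guarded: newstories.json instead of a crash
        System.out.println(endpointFor("top")); // topstories.json
    }
}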
Java
lgpl-2.1
ea0e5c3c35b27d963e489653d2192bf15a9e1fd1
0
jimregan/languagetool,languagetool-org/languagetool
/* LanguageTool, a natural language style checker * Copyright (C) 2007 Daniel Naber (http://www.danielnaber.de) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 * USA */ package org.languagetool.language; import com.google.common.cache.*; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.languagetool.*; import org.languagetool.chunking.Chunker; import org.languagetool.chunking.EnglishChunker; import org.languagetool.languagemodel.LanguageModel; import org.languagetool.rules.*; import org.languagetool.rules.en.*; import org.languagetool.rules.neuralnetwork.NeuralNetworkRuleCreator; import org.languagetool.rules.neuralnetwork.Word2VecModel; import org.languagetool.rules.patterns.PatternRuleLoader; import org.languagetool.synthesis.Synthesizer; import org.languagetool.synthesis.en.EnglishSynthesizer; import org.languagetool.tagging.Tagger; import org.languagetool.tagging.disambiguation.Disambiguator; import org.languagetool.tagging.en.EnglishHybridDisambiguator; import org.languagetool.tagging.en.EnglishTagger; import org.languagetool.tokenizers.*; import org.languagetool.tokenizers.en.EnglishWordTokenizer; import java.io.*; import java.lang.ref.WeakReference; import java.util.*; import java.util.concurrent.TimeUnit; import java.util.function.Function; /** * Support for English - use the sub classes {@link BritishEnglish}, {@link AmericanEnglish}, * etc. if you need spell checking. * Make sure to call {@link #close()} after using this (currently only relevant if you make * use of {@link EnglishConfusionProbabilityRule}). */ public class English extends Language implements AutoCloseable { private static final LoadingCache<String, List<Rule>> cache = CacheBuilder.newBuilder() .expireAfterWrite(30, TimeUnit.MINUTES) .build(new CacheLoader<String, List<Rule>>() { @Override public List<Rule> load(@NotNull String path) throws IOException { List<Rule> rules = new ArrayList<>(); PatternRuleLoader loader = new PatternRuleLoader(); try (InputStream is = JLanguageTool.getDataBroker().getAsStream(path)) { rules.addAll(loader.getRules(is, path)); } return rules; } }); private static volatile WeakReference<EnglishTagger> cachedTagger; private static final Language AMERICAN_ENGLISH = new AmericanEnglish(); private LanguageModel languageModel; /** * @deprecated use {@link AmericanEnglish} or {@link BritishEnglish} etc. 
instead - * they have rules for spell checking, this class doesn't (deprecated since 3.2) */ @Deprecated public English() { } @Override public Language getDefaultLanguageVariant() { return AMERICAN_ENGLISH; } @Override public SentenceTokenizer createDefaultSentenceTokenizer() { return new SRXSentenceTokenizer(this); } @Override public String getName() { return "English"; } @Override public String getShortCode() { return "en"; } @Override public String[] getCountries() { return new String[]{}; } @NotNull @Override public Tagger createDefaultTagger() { WeakReference<EnglishTagger> ref = cachedTagger; EnglishTagger tagger = ref == null ? null : ref.get(); if (tagger == null) { tagger = new EnglishTagger(); cachedTagger = new WeakReference<>(tagger); } return tagger; } @Nullable @Override public Chunker createDefaultChunker() { return new EnglishChunker(); } @Nullable @Override public Synthesizer createDefaultSynthesizer() { return new EnglishSynthesizer(this); } @Override public Disambiguator createDefaultDisambiguator() { return new EnglishHybridDisambiguator(); } @Override public Tokenizer createDefaultWordTokenizer() { return new EnglishWordTokenizer(); } @Override public synchronized LanguageModel getLanguageModel(File indexDir) throws IOException { languageModel = initLanguageModel(indexDir, languageModel); return languageModel; } @Override public synchronized Word2VecModel getWord2VecModel(File indexDir) throws IOException { return new Word2VecModel(indexDir + File.separator + getShortCode()); } @Override public Contributor[] getMaintainers() { return new Contributor[] { new Contributor("Mike Unwalla"), Contributors.MARCIN_MILKOWSKI, Contributors.DANIEL_NABER }; } @Override public LanguageMaintainedState getMaintainedState() { return LanguageMaintainedState.ActivelyMaintained; } @Override public List<Rule> getRelevantRules(ResourceBundle messages, UserConfig userConfig, Language motherTongue, List<Language> altLanguages) throws IOException { List<Rule> allRules = new ArrayList<>(); if (motherTongue != null) { if ("de".equals(motherTongue.getShortCode())) { allRules.addAll(cache.getUnchecked("/org/languagetool/rules/en/grammar-l2-de.xml")); } else if ("fr".equals(motherTongue.getShortCode())) { allRules.addAll(cache.getUnchecked("/org/languagetool/rules/en/grammar-l2-fr.xml")); } } allRules.addAll(Arrays.asList( new CommaWhitespaceRule(messages, Example.wrong("We had coffee<marker> ,</marker> cheese and crackers and grapes."), Example.fixed("We had coffee<marker>,</marker> cheese and crackers and grapes.")), new DoublePunctuationRule(messages), new UppercaseSentenceStartRule(messages, this, Example.wrong("This house is old. <marker>it</marker> was built in 1950."), Example.fixed("This house is old. 
<marker>It</marker> was built in 1950.")), new MultipleWhitespaceRule(messages, this), new SentenceWhitespaceRule(messages), new WhiteSpaceBeforeParagraphEnd(messages, this), new WhiteSpaceAtBeginOfParagraph(messages), new EmptyLineRule(messages, this), new LongSentenceRule(messages, userConfig, 40, true, true), new LongParagraphRule(messages, this, userConfig), new ParagraphRepeatBeginningRule(messages, this), new PunctuationMarkAtParagraphEnd(messages, this), new PunctuationMarkAtParagraphEnd2(messages, this), // specific to English: new ConsistentApostrophesRule(messages), new SpecificCaseRule(messages), new EnglishUnpairedBracketsRule(messages, this), new EnglishWordRepeatRule(messages, this), new AvsAnRule(messages), new EnglishWordRepeatBeginningRule(messages, this), new CompoundRule(messages), new ContractionSpellingRule(messages), new EnglishWrongWordInContextRule(messages), new EnglishDashRule(messages), new WordCoherencyRule(messages), new EnglishDiacriticsRule(messages), new EnglishPlainEnglishRule(messages), new EnglishRedundancyRule(messages), new SimpleReplaceRule(messages, this), new ReadabilityRule(messages, this, userConfig, false), new ReadabilityRule(messages, this, userConfig, true) )); return allRules; } @Override public List<Rule> getRelevantLanguageModelRules(ResourceBundle messages, LanguageModel languageModel, UserConfig userConfig) throws IOException { return Arrays.asList( new UpperCaseNgramRule(messages, languageModel, this, userConfig), new EnglishConfusionProbabilityRule(messages, languageModel, this), new EnglishNgramProbabilityRule(messages, languageModel, this) ); } @Override public List<Rule> getRelevantLanguageModelCapableRules(ResourceBundle messages, @Nullable LanguageModel lm, GlobalConfig globalConfig, UserConfig userConfig, Language motherTongue, List<Language> altLanguages) throws IOException { if (lm != null && motherTongue != null && "fr".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForFrenchFalseFriendRule(messages, lm, motherTongue, this) ); } if (lm != null && motherTongue != null && "de".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForGermansFalseFriendRule(messages, lm, motherTongue, this) ); } if (lm != null && motherTongue != null && "es".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForSpaniardsFalseFriendRule(messages, lm, motherTongue, this) ); } if (lm != null && motherTongue != null && "nl".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForDutchmenFalseFriendRule(messages, lm, motherTongue, this) ); } return Arrays.asList(); } @Override public boolean hasNGramFalseFriendRule(Language motherTongue) { return motherTongue != null && ( // Note: extend EnglishForL2SpeakersFalseFriendRuleTest.testMessageDetailData() // if you add a language here "de".equals(motherTongue.getShortCode()) || "fr".equals(motherTongue.getShortCode()) || "es".equals(motherTongue.getShortCode()) || "nl".equals(motherTongue.getShortCode())); } @Override public List<Rule> getRelevantWord2VecModelRules(ResourceBundle messages, Word2VecModel word2vecModel) throws IOException { return NeuralNetworkRuleCreator.createRules(messages, this, word2vecModel); } /** @since 5.1 */ @Override public String getOpeningDoubleQuote() { return "“"; } /** @since 5.1 */ @Override public String getClosingDoubleQuote() { return "”"; } /** @since 5.1 */ @Override public String getOpeningSingleQuote() { return "‘"; } /** @since 5.1 */ @Override public String getClosingSingleQuote() { 
return "’"; } /** @since 5.1 */ @Override public boolean isAdvancedTypographyEnabled() { return true; } /** * Closes the language model, if any. * @since 2.7 */ @Override public void close() throws Exception { if (languageModel != null) { languageModel.close(); } } @Override protected int getPriorityForId(String id) { switch (id) { case "I_E": return 10; // needs higher prio than EN_COMPOUNDS ("i.e learning") case "EN_DIACRITICS_REPLACE": return 9; // prefer over spell checker (like PHRASE_REPETITION) case "CHILDISH_LANGUAGE": return 8; // prefer over spell checker case "RUDE_SARCASTIC": return 6; // prefer over spell checker case "FOR_NOUN_SAKE": return 6; // prefer over PROFANITY (e.g. "for fuck sake") case "YEAR_OLD_HYPHEN": return 6; // higher prio than MISSING_HYPHEN case "MISSING_HYPHEN": return 5; case "TRANSLATION_RULE": return 5; // Premium case "WRONG_APOSTROPHE": return 5; case "DOS_AND_DONTS": return 3; case "EN_COMPOUNDS": return 2; case "ABBREVIATION_PUNCTUATION": return 2; case "FEDEX": return 2; // higher prio than many verb rules (e.g. MD_BASEFORM) case "LIFE_COMPOUNDS": return 1; case "COVID_19": return 1; case "OTHER_WISE_COMPOUND": return 1; case "ON_EXCEL": return 1; case "IF_VB_PCT": return 1; // higher prio than IF_VB case "CAUSE_BECAUSE": return 1; // higher prio than MISSING_TO_BETWEEN_BE_AND_VB case "MAY_MANY": return 1; // higher prio than MAY_MANY_MY case "BOUT_TO": return 1; // higher prio than PRP_VB case "HAVE_HAVE": return 1; // higher prio than HE_D_VBD case "UPPERCASE_SENTENCE_START": return 1; // higher prio than AI_HYDRA_LEO case "TO_TOO": return 1; // higher prio than AI_HYDRA_LEO_REPLACE_* case "CAR_CARE": return 1; // higher prio than AI_HYDRA_LEO_MISSING_THE case "LUV": return 1; // higher prio than spell checker case "DAT": return 1; // higher prio than spell checker case "MAC_OS": return 1; // higher prio than spell checker case "BESTEST": return 1; // higher prio than spell checker case "OFF_OF": return 1; // higher prio than ADJECTIVE_ADVERB case "SHELL_COMPOUNDS": return 1; // higher prio than HELL case "HANDS_ON_HYPHEN": return 1; // higher prio than A_NNS case "QUIET_QUITE": return 1; // higher prio than A_QUITE_WHILE case "A_OK": return 1; // prefer over A_AN case "I_A": return 1; // higher prio than I_IF case "GOT_GO": return 1; // higher prio than MD_BASEFORM case "THERE_FORE": return 1; // higher prio than FORE_FOR case "PRP_NO_VB": return 1; // higher prio than I_IF case "FOLLOW_UP": return 1; // higher prio than MANY_NN case "IT_SOMETHING": return 1; // higher prio than IF_YOU_ANY and IT_THE_PRP case "NO_KNOW": return 1; // higher prio than DOUBLE_NEGATIVE case "WILL_BASED_ON": return 1; // higher prio than MD_BASEFORM / PRP_PAST_PART case "DON_T_AREN_T": return 1; // higher prio than DID_BASEFORM case "WILL_BECOMING": return 1; // higher prio than MD_BASEFORM case "WOULD_NEVER_VBN": return 1; // higher prio than MD_BASEFORM case "MD_APPRECIATED": return 1; // higher prio than MD_BASEFORM case "MONEY_BACK_HYPHEN": return 1; // higher prio than A_UNCOUNTABLE case "WORLDS_BEST": return 1; // higher prio than THE_SUPERLATIVE case "STEP_COMPOUNDS": return 1; // higher prio than STARS_AND_STEPS case "WON_T_TO": return 1; // higher prio than DON_T_AREN_T case "WAN_T": return 1; // higher prio than DON_T_AREN_T case "THE_US": return 1; // higher prio than DT_PRP case "THE_IT": return 1; // higher prio than DT_PRP case "THANK_YOU_MUCH": return 1; // higher prio than other rules case "TO_DO_HYPHEN": return 1; // higher prio than other rules case 
"A_NUMBER_NNS": return 1; // higher prio than A_NNS case "A_HUNDREDS": return 1; // higher prio than A_NNS case "NOW_A_DAYS": return 1; // higher prio than A_NNS case "COUPLE_OF_TIMES": return 1; // higher prio than A_NNS case "A_WINDOWS": return 1; // higher prio than A_NNS case "A_SCISSOR": return 1; // higher prio than A_NNS case "A_SNICKERS": return 1; // higher prio than A_NNS case "ROUND_A_BOUT": return 1; // higher prio than A_NNS case "A_NNS_BEST_NN": return 1; // higher prio than A_NNS case "A_BACHELORS_IN": return 1; // higher prio than A_NNS case "NEITHER_NOR": return 1; // higher prio than COMMA_COMPOUND_SENTENCE case "FOR_AWHILE": return 1; // higher prio than COMMA_COMPOUND_SENTENCE case "A_BUT": return 1; // higher prio than COMMA_COMPOUND_SENTENCE case "MAY_BE": return 1; // higher prio than IS_IT_MAY (premium rule) case "BORN_IN": return 1; // higher prio than PRP_PAST_PART case "DO_TO": return 1; // higher prio than HAVE_PART_AGREEMENT case "CURIOS_CURIOUS": return 1; // higher prio than A_NNS and POSSESSIVE_APOSTROPHE case "INCORRECT_POSSESSIVE_APOSTROPHE": return 1; // higher prio than THIS_NNS case "THIS_YEARS_POSSESSIVE_APOSTROPHE": return 1; // higher prio than THIS_NNS case "SPURIOUS_APOSTROPHE": return 1; // higher prio than THIS_NNS case "IN_THIS_REGARDS": return 1; // higher prio than THIS_NNS case "NO_WHERE": return 1; // higher prio than NOW case "APOSTROPHE_VS_QUOTE": return 1; // higher prio than EN_QUOTES case "COMMA_PERIOD": return 1; // higher prio than COMMA_PARENTHESIS_WHITESPACE case "HERE_HEAR": return 1; // higher prio than ENGLISH_WORD_REPEAT_RULE case "LIGATURES": return 1; // prefer over spell checker case "APPSTORE": return 1; // prefer over spell checker case "INCORRECT_CONTRACTIONS": return 1; // prefer over EN_CONTRACTION_SPELLING case "DONT_T": return 1; // prefer over EN_CONTRACTION_SPELLING case "WHATS_APP": return 1; // prefer over EN_CONTRACTION_SPELLING case "NON_STANDARD_COMMA": return 1; // prefer over spell checker case "NON_STANDARD_ALPHABETIC_CHARACTERS": return 1; // prefer over spell checker case "WONT_CONTRACTION": return 1; // prefer over WONT_WANT case "YOU_GOOD": return 1; // prefer over PRP_PAST_PART case "THAN_THANK": return 1; // prefer over THAN_THEN case "CD_NN_APOSTROPHE_S": return 1; // prefer over CD_NN and LOWERCASE_NAME_APOSTROPHE_S case "IT_IF": return 1; // needs higher prio than PRP_COMMA and IF_YOU_ANY case "FINE_TUNE_COMPOUNDS": return 1; // prefer over less specific rules case "WHAT_IS_YOU": return 1; // prefer over HOW_DO_I_VB, NON3PRS_VERB case "SUPPOSE_TO": return 1; // prefer over HOW_DO_I_VB case "SEEN_SEEM": return 1; // prefer over PRP_PAST_PART case "PROFANITY": return 1; // prefer over spell checker (less prio than EN_COMPOUNDS) case "THE_THEM": return 1; // prefer over TO_TWO case "THERE_THEIR": return 1; // prefer over GO_TO_HOME case "IT_IS_DEPENDING_ON": return 1; // prefer over PROGRESSIVE_VERBS case "IRREGARDLESS": return 1; // prefer over spell checker case "MISSING_GENITIVE": return -1; // prefer over spell checker (like EN_SPECIFIC_CASE) case "EN_UNPAIRED_BRACKETS": return -1; // less priority than rules that suggest the correct brackets case "NEEDS_FIXED": return -1; // less priority than MISSING_TO_BEFORE_A_VERB case "BLACK_SEA": return -1; // less priority than SEA_COMPOUNDS case "A_TO": return -1; // less priority than other rules that offer suggestions case "MANY_NN": return -1; // less priority than PUSH_UP_HYPHEN, SOME_FACULTY case "WE_BE": return -1; case "A_LOT_OF_NN": return -1; case 
"IT_VBZ": return -1; case "ORDER_OF_WORDS_WITH_NOT": return -1; // less prio than punctuation rules case "ADVERB_WORD_ORDER_10_TEMP": return 1; case "ADVERB_WORD_ORDER": return -1; // less prio than PRP_PAST_PART // case "IT_IS_2": return -1; // needs higher prio than BEEN_PART_AGREEMENT case "A_RB_NN": return -1; // prefer other more specific rules (e.g. QUIET_QUITE, A_QUITE_WHILE) case "DT_RB_IN": return -1; // prefer other more specific rules case "VERB_NOUN_CONFUSION": return -1; // prefer other more specific rules case "NOUN_VERB_CONFUSION": return -1; // prefer other more specific rules case "PLURAL_VERB_AFTER_THIS": return -1; // prefer other more specific rules (e.g. COMMA_TAG_QUESTION) case "BE_RB_BE": return -1; // prefer other more specific rules case "IT_ITS": return -1; // prefer other more specific rules case "ENGLISH_WORD_REPEAT_RULE": return -1; // prefer other more specific rules (e.g. IT_IT) case "PRP_MD_NN": return -1; // prefer other more specific rules (e.g. MD_ABLE, WONT_WANT) case "NON_ANTI_PRE_JJ": return -1; // prefer other more specific rules case "DT_JJ_NO_NOUN": return -1; // prefer other more specific rules (e.g. THIRD_PARTY) case "AGREEMENT_SENT_START": return -1; // prefer other more specific rules case "HAVE_PART_AGREEMENT": return -1; // prefer other more specific rules case "PREPOSITION_VERB": return -1; // prefer other more specific rules case "EN_A_VS_AN": return -1; // prefer other more specific rules (with suggestions, e.g. AN_ALSO) case "CD_NN": return -1; // prefer other more specific rules (with suggestions) case "ATD_VERBS_TO_COLLOCATION": return -1; // prefer other more specific rules (with suggestions) case "ADVERB_OR_HYPHENATED_ADJECTIVE": return -1; // prefer other more specific rules (with suggestions) case "GOING_TO_VBD": return -1; // prefer other more specific rules (with suggestions, e.g. GOING_TO_JJ) case "MISSING_PREPOSITION": return -1; // prefer other more specific rules (with suggestions) case "BE_TO_VBG": return -1; // prefer other more specific rules (with suggestions) case "NON3PRS_VERB": return -1; // prefer other more specific rules (with suggestions, e.g. DONS_T) case "DID_FOUND_AMBIGUOUS": return -1; // prefer other more specific rules (e.g. TWO_CONNECTED_MODAL_VERBS) case "BE_I_BE_GERUND": return -1; // prefer other more specific rules (with suggestions) case "VBZ_VBD": return -1; // prefer other more specific rules (e.g. IS_WAS) case "SUPERLATIVE_THAN": return -1; // prefer other more specific rules case "UNLIKELY_OPENING_PUNCTUATION": return -1; // prefer other more specific rules case "METRIC_UNITS_EN_IMPERIAL": return -1; // prefer MILE_HYPHEN case "METRIC_UNITS_EN_GB": return -1; // prefer MILE_HYPHEN case "COMMA_COMPOUND_SENTENCE": return -1; // prefer other rules case "COMMA_COMPOUND_SENTENCE_2": return -1; // prefer other rules case "PRP_RB_NO_VB": return -2; // prefer other more specific rules (with suggestions) case "PRP_VBG": return -2; // prefer other more specific rules (with suggestions, prefer over HE_VERB_AGR) case "PRP_VBZ": return -2; // prefer other more specific rules (with suggestions) case "PRP_VB": return -2; // prefer other more specific rules (with suggestions) case "BE_VBP_IN": return -2; // prefer over BEEN_PART_AGREEMENT case "BEEN_PART_AGREEMENT": return -3; // prefer other more specific rules (e.g. VARY_VERY, VB_NN) case "A_INFINITIVE": return -3; // prefer other more specific rules (with suggestions, e.g. 
PREPOSITION_VERB, THE_TO) case "HE_VERB_AGR": return -3; // prefer other more specific rules (e.g. PRP_VBG) case "PRP_JJ": return -3; // prefer other rules (e.g. PRP_VBG, IT_IT and ADJECTIVE_ADVERB, PRP_ABLE, PRP_NEW, MD_IT_JJ) case "PRONOUN_NOUN": return -3; // prefer other rules (e.g. PRP_VB, PRP_JJ) case "INDIAN_ENGLISH": return -3; // prefer grammar rules, but higher prio than spell checker case "GONNA_TEMP": return -3; case "PRP_THE": return -4; // prefer other rules (e.g. I_A, PRP_JJ, IF_YOU_ANY, I_AN) case "GONNA": return -4; // prefer over spelling rules case "MORFOLOGIK_RULE_EN_US": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_GB": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_CA": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_ZA": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_NZ": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_AU": return -10; // more specific rules (e.g. L2 rules) have priority case "TWO_CONNECTED_MODAL_VERBS": return -15; case "WANT_TO_NN": return -25; // prefer more specific rules that give a suggestion case "QUESTION_WITHOUT_VERB": return -25; // prefer more specific rules that give a suggestion case "SENTENCE_FRAGMENT": return -50; // prefer other more important sentence start corrections. case "SENTENCE_FRAGMENT_SINGLE_WORDS": return -51; // prefer other more important sentence start corrections. case "EN_REDUNDANCY_REPLACE": return -510; // style rules should always have the lowest priority. case "EN_PLAIN_ENGLISH_REPLACE": return -511; // style rules should always have the lowest priority. case "THREE_NN": return -600; // style rules should always have the lowest priority. case "SENT_START_NUM": return -600; // style rules should always have the lowest priority. case "PASSIVE_VOICE": return -600; // style rules should always have the lowest priority. case "EG_NO_COMMA": return -600; // style rules should always have the lowest priority. case "IE_NO_COMMA": return -600; // style rules should always have the lowest priority. case "REASON_WHY": return -600; // style rules should always have the lowest priority. 
case LongSentenceRule.RULE_ID: return -997; case LongParagraphRule.RULE_ID: return -998; } if (id.startsWith("CONFUSION_RULE_")) { return -20; } if (id.matches("EN_FOR_[A-Z]+_SPEAKERS_FALSE_FRIENDS.*")) { return -21; } return super.getPriorityForId(id); } @Override public Function<Rule, Rule> getRemoteEnhancedRules(ResourceBundle messageBundle, List<RemoteRuleConfig> configs, UserConfig userConfig, Language motherTongue, List<Language> altLanguages, boolean inputLogging) throws IOException { Function<Rule, Rule> fallback = super.getRemoteEnhancedRules(messageBundle, configs, userConfig, motherTongue, altLanguages, inputLogging); RemoteRuleConfig bert = RemoteRuleConfig.getRelevantConfig(BERTSuggestionRanking.RULE_ID, configs); return original -> { if (original.isDictionaryBasedSpellingRule() && original.getId().startsWith("MORFOLOGIK_RULE_EN")) { if (bert != null) { return new BERTSuggestionRanking(this, original, bert, inputLogging); } } return fallback.apply(original); }; } @Override public List<Rule> getRelevantRemoteRules(ResourceBundle messageBundle, List<RemoteRuleConfig> configs, GlobalConfig globalConfig, UserConfig userConfig, Language motherTongue, List<Language> altLanguages, boolean inputLogging) throws IOException { List<Rule> rules = new ArrayList<>(super.getRelevantRemoteRules( messageBundle, configs, globalConfig, userConfig, motherTongue, altLanguages, inputLogging)); // no description needed - matches based on automatically created rules with descriptions provided by remote server rules.addAll(GRPCRule.createAll(this, configs, inputLogging, "AI_EN_", "INTERNAL - dynamically loaded rule supported by remote server")); rules.addAll(GRPCRule.createAll(this, configs, inputLogging, "AI_HYDRA_LEO", "INTERNAL - dynamically loaded rule supported by remote server")); return rules; } }
languagetool-language-modules/en/src/main/java/org/languagetool/language/English.java
/* LanguageTool, a natural language style checker * Copyright (C) 2007 Daniel Naber (http://www.danielnaber.de) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 * USA */ package org.languagetool.language; import com.google.common.cache.*; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.languagetool.*; import org.languagetool.chunking.Chunker; import org.languagetool.chunking.EnglishChunker; import org.languagetool.languagemodel.LanguageModel; import org.languagetool.rules.*; import org.languagetool.rules.en.*; import org.languagetool.rules.neuralnetwork.NeuralNetworkRuleCreator; import org.languagetool.rules.neuralnetwork.Word2VecModel; import org.languagetool.rules.patterns.PatternRuleLoader; import org.languagetool.synthesis.Synthesizer; import org.languagetool.synthesis.en.EnglishSynthesizer; import org.languagetool.tagging.Tagger; import org.languagetool.tagging.disambiguation.Disambiguator; import org.languagetool.tagging.en.EnglishHybridDisambiguator; import org.languagetool.tagging.en.EnglishTagger; import org.languagetool.tokenizers.*; import org.languagetool.tokenizers.en.EnglishWordTokenizer; import java.io.*; import java.lang.ref.WeakReference; import java.util.*; import java.util.concurrent.TimeUnit; import java.util.function.Function; /** * Support for English - use the sub classes {@link BritishEnglish}, {@link AmericanEnglish}, * etc. if you need spell checking. * Make sure to call {@link #close()} after using this (currently only relevant if you make * use of {@link EnglishConfusionProbabilityRule}). */ public class English extends Language implements AutoCloseable { private static final LoadingCache<String, List<Rule>> cache = CacheBuilder.newBuilder() .expireAfterWrite(30, TimeUnit.MINUTES) .build(new CacheLoader<String, List<Rule>>() { @Override public List<Rule> load(@NotNull String path) throws IOException { List<Rule> rules = new ArrayList<>(); PatternRuleLoader loader = new PatternRuleLoader(); try (InputStream is = JLanguageTool.getDataBroker().getAsStream(path)) { rules.addAll(loader.getRules(is, path)); } return rules; } }); private static volatile WeakReference<EnglishTagger> cachedTagger; private static final Language AMERICAN_ENGLISH = new AmericanEnglish(); private LanguageModel languageModel; /** * @deprecated use {@link AmericanEnglish} or {@link BritishEnglish} etc. 
instead - * they have rules for spell checking, this class doesn't (deprecated since 3.2) */ @Deprecated public English() { } @Override public Language getDefaultLanguageVariant() { return AMERICAN_ENGLISH; } @Override public SentenceTokenizer createDefaultSentenceTokenizer() { return new SRXSentenceTokenizer(this); } @Override public String getName() { return "English"; } @Override public String getShortCode() { return "en"; } @Override public String[] getCountries() { return new String[]{}; } @NotNull @Override public Tagger createDefaultTagger() { WeakReference<EnglishTagger> ref = cachedTagger; EnglishTagger tagger = ref == null ? null : ref.get(); if (tagger == null) { tagger = new EnglishTagger(); cachedTagger = new WeakReference<>(tagger); } return tagger; } @Nullable @Override public Chunker createDefaultChunker() { return new EnglishChunker(); } @Nullable @Override public Synthesizer createDefaultSynthesizer() { return new EnglishSynthesizer(this); } @Override public Disambiguator createDefaultDisambiguator() { return new EnglishHybridDisambiguator(); } @Override public Tokenizer createDefaultWordTokenizer() { return new EnglishWordTokenizer(); } @Override public synchronized LanguageModel getLanguageModel(File indexDir) throws IOException { languageModel = initLanguageModel(indexDir, languageModel); return languageModel; } @Override public synchronized Word2VecModel getWord2VecModel(File indexDir) throws IOException { return new Word2VecModel(indexDir + File.separator + getShortCode()); } @Override public Contributor[] getMaintainers() { return new Contributor[] { new Contributor("Mike Unwalla"), Contributors.MARCIN_MILKOWSKI, Contributors.DANIEL_NABER }; } @Override public LanguageMaintainedState getMaintainedState() { return LanguageMaintainedState.ActivelyMaintained; } @Override public List<Rule> getRelevantRules(ResourceBundle messages, UserConfig userConfig, Language motherTongue, List<Language> altLanguages) throws IOException { List<Rule> allRules = new ArrayList<>(); if (motherTongue != null) { if ("de".equals(motherTongue.getShortCode())) { allRules.addAll(cache.getUnchecked("/org/languagetool/rules/en/grammar-l2-de.xml")); } else if ("fr".equals(motherTongue.getShortCode())) { allRules.addAll(cache.getUnchecked("/org/languagetool/rules/en/grammar-l2-fr.xml")); } } allRules.addAll(Arrays.asList( new CommaWhitespaceRule(messages, Example.wrong("We had coffee<marker> ,</marker> cheese and crackers and grapes."), Example.fixed("We had coffee<marker>,</marker> cheese and crackers and grapes.")), new DoublePunctuationRule(messages), new UppercaseSentenceStartRule(messages, this, Example.wrong("This house is old. <marker>it</marker> was built in 1950."), Example.fixed("This house is old. 
<marker>It</marker> was built in 1950.")), new MultipleWhitespaceRule(messages, this), new SentenceWhitespaceRule(messages), new WhiteSpaceBeforeParagraphEnd(messages, this), new WhiteSpaceAtBeginOfParagraph(messages), new EmptyLineRule(messages, this), new LongSentenceRule(messages, userConfig, 40, true, true), new LongParagraphRule(messages, this, userConfig), new ParagraphRepeatBeginningRule(messages, this), new PunctuationMarkAtParagraphEnd(messages, this), new PunctuationMarkAtParagraphEnd2(messages, this), // specific to English: new ConsistentApostrophesRule(messages), new SpecificCaseRule(messages), new EnglishUnpairedBracketsRule(messages, this), new EnglishWordRepeatRule(messages, this), new AvsAnRule(messages), new EnglishWordRepeatBeginningRule(messages, this), new CompoundRule(messages), new ContractionSpellingRule(messages), new EnglishWrongWordInContextRule(messages), new EnglishDashRule(messages), new WordCoherencyRule(messages), new EnglishDiacriticsRule(messages), new EnglishPlainEnglishRule(messages), new EnglishRedundancyRule(messages), new SimpleReplaceRule(messages, this), new ReadabilityRule(messages, this, userConfig, false), new ReadabilityRule(messages, this, userConfig, true) )); return allRules; } @Override public List<Rule> getRelevantLanguageModelRules(ResourceBundle messages, LanguageModel languageModel, UserConfig userConfig) throws IOException { return Arrays.asList( new UpperCaseNgramRule(messages, languageModel, this, userConfig), new EnglishConfusionProbabilityRule(messages, languageModel, this), new EnglishNgramProbabilityRule(messages, languageModel, this) ); } @Override public List<Rule> getRelevantLanguageModelCapableRules(ResourceBundle messages, @Nullable LanguageModel lm, GlobalConfig globalConfig, UserConfig userConfig, Language motherTongue, List<Language> altLanguages) throws IOException { if (lm != null && motherTongue != null && "fr".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForFrenchFalseFriendRule(messages, lm, motherTongue, this) ); } if (lm != null && motherTongue != null && "de".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForGermansFalseFriendRule(messages, lm, motherTongue, this) ); } if (lm != null && motherTongue != null && "es".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForSpaniardsFalseFriendRule(messages, lm, motherTongue, this) ); } if (lm != null && motherTongue != null && "nl".equals(motherTongue.getShortCode())) { return Arrays.asList( new EnglishForDutchmenFalseFriendRule(messages, lm, motherTongue, this) ); } return Arrays.asList(); } @Override public boolean hasNGramFalseFriendRule(Language motherTongue) { return motherTongue != null && ( // Note: extend EnglishForL2SpeakersFalseFriendRuleTest.testMessageDetailData() // if you add a language here "de".equals(motherTongue.getShortCode()) || "fr".equals(motherTongue.getShortCode()) || "es".equals(motherTongue.getShortCode()) || "nl".equals(motherTongue.getShortCode())); } @Override public List<Rule> getRelevantWord2VecModelRules(ResourceBundle messages, Word2VecModel word2vecModel) throws IOException { return NeuralNetworkRuleCreator.createRules(messages, this, word2vecModel); } /** @since 5.1 */ @Override public String getOpeningDoubleQuote() { return "“"; } /** @since 5.1 */ @Override public String getClosingDoubleQuote() { return "”"; } /** @since 5.1 */ @Override public String getOpeningSingleQuote() { return "‘"; } /** @since 5.1 */ @Override public String getClosingSingleQuote() { 
return "’"; } /** @since 5.1 */ @Override public boolean isAdvancedTypographyEnabled() { return true; } /** * Closes the language model, if any. * @since 2.7 */ @Override public void close() throws Exception { if (languageModel != null) { languageModel.close(); } } @Override protected int getPriorityForId(String id) { switch (id) { case "I_E": return 10; // needs higher prio than EN_COMPOUNDS ("i.e learning") case "YEAR_OLD_HYPHEN": return 6; // higher prio than MISSING_HYPHEN case "MISSING_HYPHEN": return 5; case "TRANSLATION_RULE": return 5; // Premium case "WRONG_APOSTROPHE": return 5; case "DOS_AND_DONTS": return 3; case "EN_COMPOUNDS": return 2; case "ABBREVIATION_PUNCTUATION": return 2; case "FEDEX": return 2; // higher prio than many verb rules (e.g. MD_BASEFORM) case "LIFE_COMPOUNDS": return 1; case "COVID_19": return 1; case "OTHER_WISE_COMPOUND": return 1; case "ON_EXCEL": return 1; case "IF_VB_PCT": return 1; // higher prio than IF_VB case "CAUSE_BECAUSE": return 1; // higher prio than MISSING_TO_BETWEEN_BE_AND_VB case "MAY_MANY": return 1; // higher prio than MAY_MANY_MY case "BOUT_TO": return 1; // higher prio than PRP_VB case "HAVE_HAVE": return 1; // higher prio than HE_D_VBD case "CAR_CARE": return 1; // higher prio than AI_MISSING_WORD_ARTICLE_THE case "LUV": return 1; // higher prio than spell checker case "DAT": return 1; // higher prio than spell checker case "MAC_OS": return 1; // higher prio than spell checker case "BESTEST": return 1; // higher prio than spell checker case "OFF_OF": return 1; // higher prio than ADJECTIVE_ADVERB case "SHELL_COMPOUNDS": return 1; // higher prio than HELL case "HANDS_ON_HYPHEN": return 1; // higher prio than A_NNS case "QUIET_QUITE": return 1; // higher prio than A_QUITE_WHILE case "A_OK": return 1; // prefer over A_AN case "I_A": return 1; // higher prio than I_IF case "GOT_GO": return 1; // higher prio than MD_BASEFORM case "UPPERCASE_SENTENCE_START": return 1; // higher prio than AI_MISSING_THE_* case "THERE_FORE": return 1; // higher prio than FORE_FOR case "PRP_NO_VB": return 1; // higher prio than I_IF case "FOLLOW_UP": return 1; // higher prio than MANY_NN case "IT_SOMETHING": return 1; // higher prio than IF_YOU_ANY and IT_THE_PRP case "NO_KNOW": return 1; // higher prio than DOUBLE_NEGATIVE case "WILL_BASED_ON": return 1; // higher prio than MD_BASEFORM / PRP_PAST_PART case "DON_T_AREN_T": return 1; // higher prio than DID_BASEFORM case "WILL_BECOMING": return 1; // higher prio than MD_BASEFORM case "WOULD_NEVER_VBN": return 1; // higher prio than MD_BASEFORM case "MD_APPRECIATED": return 1; // higher prio than MD_BASEFORM case "MONEY_BACK_HYPHEN": return 1; // higher prio than A_UNCOUNTABLE case "WORLDS_BEST": return 1; // higher prio than THE_SUPERLATIVE case "STEP_COMPOUNDS": return 1; // higher prio than STARS_AND_STEPS case "WON_T_TO": return 1; // higher prio than DON_T_AREN_T case "WAN_T": return 1; // higher prio than DON_T_AREN_T case "THE_US": return 1; // higher prio than DT_PRP case "THE_IT": return 1; // higher prio than DT_PRP case "THANK_YOU_MUCH": return 1; // higher prio than other rules case "TO_DO_HYPHEN": return 1; // higher prio than other rules case "A_NUMBER_NNS": return 1; // higher prio than A_NNS case "A_HUNDREDS": return 1; // higher prio than A_NNS case "NOW_A_DAYS": return 1; // higher prio than A_NNS case "COUPLE_OF_TIMES": return 1; // higher prio than A_NNS case "A_WINDOWS": return 1; // higher prio than A_NNS case "A_SCISSOR": return 1; // higher prio than A_NNS case "A_SNICKERS": return 1; // 
higher prio than A_NNS case "ROUND_A_BOUT": return 1; // higher prio than A_NNS case "A_NNS_BEST_NN": return 1; // higher prio than A_NNS case "A_BACHELORS_IN": return 1; // higher prio than A_NNS case "NEITHER_NOR": return 1; // higher prio than COMMA_COMPOUND_SENTENCE case "FOR_AWHILE": return 1; // higher prio than COMMA_COMPOUND_SENTENCE case "A_BUT": return 1; // higher prio than COMMA_COMPOUND_SENTENCE case "MAY_BE": return 1; // higher prio than IS_IT_MAY (premium rule) case "BORN_IN": return 1; // higher prio than PRP_PAST_PART case "DO_TO": return 1; // higher prio than HAVE_PART_AGREEMENT case "CURIOS_CURIOUS": return 1; // higher prio than A_NNS and POSSESSIVE_APOSTROPHE case "INCORRECT_POSSESSIVE_APOSTROPHE": return 1; // higher prio than THIS_NNS case "THIS_YEARS_POSSESSIVE_APOSTROPHE": return 1; // higher prio than THIS_NNS case "SPURIOUS_APOSTROPHE": return 1; // higher prio than THIS_NNS case "IN_THIS_REGARDS": return 1; // higher prio than THIS_NNS case "NO_WHERE": return 1; // higher prio than NOW case "APOSTROPHE_VS_QUOTE": return 1; // higher prio than EN_QUOTES case "COMMA_PERIOD": return 1; // higher prio than COMMA_PARENTHESIS_WHITESPACE case "HERE_HEAR": return 1; // higher prio than ENGLISH_WORD_REPEAT_RULE case "LIGATURES": return 1; // prefer over spell checker case "APPSTORE": return 1; // prefer over spell checker case "INCORRECT_CONTRACTIONS": return 1; // prefer over EN_CONTRACTION_SPELLING case "DONT_T": return 1; // prefer over EN_CONTRACTION_SPELLING case "WHATS_APP": return 1; // prefer over EN_CONTRACTION_SPELLING case "NON_STANDARD_COMMA": return 1; // prefer over spell checker case "NON_STANDARD_ALPHABETIC_CHARACTERS": return 1; // prefer over spell checker case "WONT_CONTRACTION": return 1; // prefer over WONT_WANT case "YOU_GOOD": return 1; // prefer over PRP_PAST_PART case "THAN_THANK": return 1; // prefer over THAN_THEN case "CD_NN_APOSTROPHE_S": return 1; // prefer over CD_NN and LOWERCASE_NAME_APOSTROPHE_S case "IT_IF": return 1; // needs higher prio than PRP_COMMA and IF_YOU_ANY case "FINE_TUNE_COMPOUNDS": return 1; // prefer over less specific rules case "WHAT_IS_YOU": return 1; // prefer over HOW_DO_I_VB, NON3PRS_VERB case "SUPPOSE_TO": return 1; // prefer over HOW_DO_I_VB case "SEEN_SEEM": return 1; // prefer over PRP_PAST_PART case "PROFANITY": return 1; // prefer over spell checker (less prio than EN_COMPOUNDS) case "THE_THEM": return 1; // prefer over TO_TWO case "THERE_THEIR": return 1; // prefer over GO_TO_HOME case "IT_IS_DEPENDING_ON": return 1; // prefer over PROGRESSIVE_VERBS case "IRREGARDLESS": return 1; // prefer over spell checker case "FOR_NOUN_SAKE": return 6; // prefer over PROFANITY (e.g. 
"for fuck sake") case "RUDE_SARCASTIC": return 6; // prefer over spell checker case "CHILDISH_LANGUAGE": return 8; // prefer over spell checker case "EN_DIACRITICS_REPLACE": return 9; // prefer over spell checker (like PHRASE_REPETITION) case "MISSING_GENITIVE": return -1; // prefer over spell checker (like EN_SPECIFIC_CASE) case "EN_UNPAIRED_BRACKETS": return -1; // less priority than rules that suggest the correct brackets case "NEEDS_FIXED": return -1; // less priority than MISSING_TO_BEFORE_A_VERB case "BLACK_SEA": return -1; // less priority than SEA_COMPOUNDS case "A_TO": return -1; // less priority than other rules that offer suggestions case "MANY_NN": return -1; // less priority than PUSH_UP_HYPHEN, SOME_FACULTY case "WE_BE": return -1; case "A_LOT_OF_NN": return -1; case "IT_VBZ": return -1; case "ORDER_OF_WORDS_WITH_NOT": return -1; // less prio than punctuation rules case "ADVERB_WORD_ORDER_10_TEMP": return 1; case "ADVERB_WORD_ORDER": return -1; // less prio than PRP_PAST_PART // case "IT_IS_2": return -1; // needs higher prio than BEEN_PART_AGREEMENT case "A_RB_NN": return -1; // prefer other more specific rules (e.g. QUIET_QUITE, A_QUITE_WHILE) case "DT_RB_IN": return -1; // prefer other more specific rules case "VERB_NOUN_CONFUSION": return -1; // prefer other more specific rules case "NOUN_VERB_CONFUSION": return -1; // prefer other more specific rules case "PLURAL_VERB_AFTER_THIS": return -1; // prefer other more specific rules (e.g. COMMA_TAG_QUESTION) case "BE_RB_BE": return -1; // prefer other more specific rules case "IT_ITS": return -1; // prefer other more specific rules case "ENGLISH_WORD_REPEAT_RULE": return -1; // prefer other more specific rules (e.g. IT_IT) case "PRP_MD_NN": return -1; // prefer other more specific rules (e.g. MD_ABLE, WONT_WANT) case "NON_ANTI_PRE_JJ": return -1; // prefer other more specific rules case "DT_JJ_NO_NOUN": return -1; // prefer other more specific rules (e.g. THIRD_PARTY) case "AGREEMENT_SENT_START": return -1; // prefer other more specific rules case "HAVE_PART_AGREEMENT": return -1; // prefer other more specific rules case "PREPOSITION_VERB": return -1; // prefer other more specific rules case "EN_A_VS_AN": return -1; // prefer other more specific rules (with suggestions, e.g. AN_ALSO) case "CD_NN": return -1; // prefer other more specific rules (with suggestions) case "ATD_VERBS_TO_COLLOCATION": return -1; // prefer other more specific rules (with suggestions) case "ADVERB_OR_HYPHENATED_ADJECTIVE": return -1; // prefer other more specific rules (with suggestions) case "GOING_TO_VBD": return -1; // prefer other more specific rules (with suggestions, e.g. GOING_TO_JJ) case "MISSING_PREPOSITION": return -1; // prefer other more specific rules (with suggestions) case "BE_TO_VBG": return -1; // prefer other more specific rules (with suggestions) case "NON3PRS_VERB": return -1; // prefer other more specific rules (with suggestions, e.g. DONS_T) case "DID_FOUND_AMBIGUOUS": return -1; // prefer other more specific rules (e.g. TWO_CONNECTED_MODAL_VERBS) case "BE_I_BE_GERUND": return -1; // prefer other more specific rules (with suggestions) case "VBZ_VBD": return -1; // prefer other more specific rules (e.g. 
IS_WAS) case "SUPERLATIVE_THAN": return -1; // prefer other more specific rules case "UNLIKELY_OPENING_PUNCTUATION": return -1; // prefer other more specific rules case "METRIC_UNITS_EN_IMPERIAL": return -1; // prefer MILE_HYPHEN case "METRIC_UNITS_EN_GB": return -1; // prefer MILE_HYPHEN case "COMMA_COMPOUND_SENTENCE": return -1; // prefer other rules case "COMMA_COMPOUND_SENTENCE_2": return -1; // prefer other rules case "PRP_RB_NO_VB": return -2; // prefer other more specific rules (with suggestions) case "PRP_VBG": return -2; // prefer other more specific rules (with suggestions, prefer over HE_VERB_AGR) case "PRP_VBZ": return -2; // prefer other more specific rules (with suggestions) case "PRP_VB": return -2; // prefer other more specific rules (with suggestions) case "BE_VBP_IN": return -2; // prefer over BEEN_PART_AGREEMENT case "BEEN_PART_AGREEMENT": return -3; // prefer other more specific rules (e.g. VARY_VERY, VB_NN) case "A_INFINITIVE": return -3; // prefer other more specific rules (with suggestions, e.g. PREPOSITION_VERB, THE_TO) case "HE_VERB_AGR": return -3; // prefer other more specific rules (e.g. PRP_VBG) case "PRP_JJ": return -3; // prefer other rules (e.g. PRP_VBG, IT_IT and ADJECTIVE_ADVERB, PRP_ABLE, PRP_NEW, MD_IT_JJ) case "PRONOUN_NOUN": return -3; // prefer other rules (e.g. PRP_VB, PRP_JJ) case "INDIAN_ENGLISH": return -3; // prefer grammar rules, but higher prio than spell checker case "GONNA_TEMP": return -3; case "PRP_THE": return -4; // prefer other rules (e.g. I_A, PRP_JJ, IF_YOU_ANY, I_AN) case "GONNA": return -4; // prefer over spelling rules case "MORFOLOGIK_RULE_EN_US": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_GB": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_CA": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_ZA": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_NZ": return -10; // more specific rules (e.g. L2 rules) have priority case "MORFOLOGIK_RULE_EN_AU": return -10; // more specific rules (e.g. L2 rules) have priority case "TWO_CONNECTED_MODAL_VERBS": return -15; case "WANT_TO_NN": return -25; // prefer more specific rules that give a suggestion case "QUESTION_WITHOUT_VERB": return -25; // prefer more specific rules that give a suggestion case "SENTENCE_FRAGMENT": return -50; // prefer other more important sentence start corrections. case "SENTENCE_FRAGMENT_SINGLE_WORDS": return -51; // prefer other more important sentence start corrections. case "EN_REDUNDANCY_REPLACE": return -510; // style rules should always have the lowest priority. case "EN_PLAIN_ENGLISH_REPLACE": return -511; // style rules should always have the lowest priority. case "THREE_NN": return -600; // style rules should always have the lowest priority. case "SENT_START_NUM": return -600; // style rules should always have the lowest priority. case "PASSIVE_VOICE": return -600; // style rules should always have the lowest priority. case "EG_NO_COMMA": return -600; // style rules should always have the lowest priority. case "IE_NO_COMMA": return -600; // style rules should always have the lowest priority. case "REASON_WHY": return -600; // style rules should always have the lowest priority. 
case LongSentenceRule.RULE_ID: return -997; case LongParagraphRule.RULE_ID: return -998; } if (id.startsWith("CONFUSION_RULE_")) { return -20; } if (id.matches("EN_FOR_[A-Z]+_SPEAKERS_FALSE_FRIENDS.*")) { return -21; } return super.getPriorityForId(id); } @Override public Function<Rule, Rule> getRemoteEnhancedRules(ResourceBundle messageBundle, List<RemoteRuleConfig> configs, UserConfig userConfig, Language motherTongue, List<Language> altLanguages, boolean inputLogging) throws IOException { Function<Rule, Rule> fallback = super.getRemoteEnhancedRules(messageBundle, configs, userConfig, motherTongue, altLanguages, inputLogging); RemoteRuleConfig bert = RemoteRuleConfig.getRelevantConfig(BERTSuggestionRanking.RULE_ID, configs); return original -> { if (original.isDictionaryBasedSpellingRule() && original.getId().startsWith("MORFOLOGIK_RULE_EN")) { if (bert != null) { return new BERTSuggestionRanking(this, original, bert, inputLogging); } } return fallback.apply(original); }; } @Override public List<Rule> getRelevantRemoteRules(ResourceBundle messageBundle, List<RemoteRuleConfig> configs, GlobalConfig globalConfig, UserConfig userConfig, Language motherTongue, List<Language> altLanguages, boolean inputLogging) throws IOException { List<Rule> rules = new ArrayList<>(super.getRelevantRemoteRules( messageBundle, configs, globalConfig, userConfig, motherTongue, altLanguages, inputLogging)); // no description needed - matches based on automatically created rules with descriptions provided by remote server rules.addAll(GRPCRule.createAll(this, configs, inputLogging, "AI_EN_", "INTERNAL - dynamically loaded rule supported by remote server")); rules.addAll(GRPCRule.createAll(this, configs, inputLogging, "AI_HYDRA_LEO", "INTERNAL - dynamically loaded rule supported by remote server")); return rules; } }
Increase TO_TOO priority to be higher than HydraLEO and code style improvements (#4635)

* Increase priority of TO_TOO to be higher than AI_HYDRA_LEO
* Coding style improvements
languagetool-language-modules/en/src/main/java/org/languagetool/language/English.java
Increase TO_TOO priority to be higher than HydraLEO and code style improvements (#4635)
<ide><path>anguagetool-language-modules/en/src/main/java/org/languagetool/language/English.java
<ide> @Override
<ide> protected int getPriorityForId(String id) {
<ide> switch (id) {
<del> case "I_E": return 10; // needs higher prio than EN_COMPOUNDS ("i.e learning")
<del> case "YEAR_OLD_HYPHEN": return 6; // higher prio than MISSING_HYPHEN
<add> case "I_E": return 10; // needs higher prio than EN_COMPOUNDS ("i.e learning")
<add> case "EN_DIACRITICS_REPLACE": return 9; // prefer over spell checker (like PHRASE_REPETITION)
<add> case "CHILDISH_LANGUAGE": return 8; // prefer over spell checker
<add> case "RUDE_SARCASTIC": return 6; // prefer over spell checker
<add> case "FOR_NOUN_SAKE": return 6; // prefer over PROFANITY (e.g. "for fuck sake")
<add> case "YEAR_OLD_HYPHEN": return 6; // higher prio than MISSING_HYPHEN
<ide> case "MISSING_HYPHEN": return 5;
<ide> case "TRANSLATION_RULE": return 5; // Premium
<ide> case "WRONG_APOSTROPHE": return 5;
<ide> case "COVID_19": return 1;
<ide> case "OTHER_WISE_COMPOUND": return 1;
<ide> case "ON_EXCEL": return 1;
<del> case "IF_VB_PCT": return 1; // higher prio than IF_VB
<add> case "IF_VB_PCT": return 1; // higher prio than IF_VB
<ide> case "CAUSE_BECAUSE": return 1; // higher prio than MISSING_TO_BETWEEN_BE_AND_VB
<ide> case "MAY_MANY": return 1; // higher prio than MAY_MANY_MY
<ide> case "BOUT_TO": return 1; // higher prio than PRP_VB
<ide> case "HAVE_HAVE": return 1; // higher prio than HE_D_VBD
<del> case "CAR_CARE": return 1; // higher prio than AI_MISSING_WORD_ARTICLE_THE
<add> case "UPPERCASE_SENTENCE_START": return 1; // higher prio than AI_HYDRA_LEO
<add> case "TO_TOO": return 1; // higher prio than AI_HYDRA_LEO_REPLACE_*
<add> case "CAR_CARE": return 1; // higher prio than AI_HYDRA_LEO_MISSING_THE
<ide> case "LUV": return 1; // higher prio than spell checker
<ide> case "DAT": return 1; // higher prio than spell checker
<ide> case "MAC_OS": return 1; // higher prio than spell checker
<ide> case "A_OK": return 1; // prefer over A_AN
<ide> case "I_A": return 1; // higher prio than I_IF
<ide> case "GOT_GO": return 1; // higher prio than MD_BASEFORM
<del> case "UPPERCASE_SENTENCE_START": return 1; // higher prio than AI_MISSING_THE_*
<ide> case "THERE_FORE": return 1; // higher prio than FORE_FOR
<ide> case "PRP_NO_VB": return 1; // higher prio than I_IF
<ide> case "FOLLOW_UP": return 1; // higher prio than MANY_NN
<ide> case "BORN_IN": return 1; // higher prio than PRP_PAST_PART
<ide> case "DO_TO": return 1; // higher prio than HAVE_PART_AGREEMENT
<ide> case "CURIOS_CURIOUS": return 1; // higher prio than A_NNS and POSSESSIVE_APOSTROPHE
<del> case "INCORRECT_POSSESSIVE_APOSTROPHE": return 1; // higher prio than THIS_NNS
<del> case "THIS_YEARS_POSSESSIVE_APOSTROPHE": return 1; // higher prio than THIS_NNS
<del> case "SPURIOUS_APOSTROPHE": return 1; // higher prio than THIS_NNS
<add> case "INCORRECT_POSSESSIVE_APOSTROPHE": return 1; // higher prio than THIS_NNS
<add> case "THIS_YEARS_POSSESSIVE_APOSTROPHE": return 1; // higher prio than THIS_NNS
<add> case "SPURIOUS_APOSTROPHE": return 1; // higher prio than THIS_NNS
<ide> case "IN_THIS_REGARDS": return 1; // higher prio than THIS_NNS
<ide> case "NO_WHERE": return 1; // higher prio than NOW
<ide> case "APOSTROPHE_VS_QUOTE": return 1; // higher prio than EN_QUOTES
<ide> case "DONT_T": return 1; // prefer over EN_CONTRACTION_SPELLING
<ide> case "WHATS_APP": return 1; // prefer over EN_CONTRACTION_SPELLING
<ide> case "NON_STANDARD_COMMA": return 1; // prefer over spell checker
<del> case "NON_STANDARD_ALPHABETIC_CHARACTERS": return 1; // prefer over spell checker
<add> case "NON_STANDARD_ALPHABETIC_CHARACTERS": return 1; // prefer over spell checker
<ide> case "WONT_CONTRACTION": return 1; // prefer over WONT_WANT
<ide> case "YOU_GOOD": return 1; // prefer over PRP_PAST_PART
<ide> case "THAN_THANK": return 1; // prefer over THAN_THEN
<ide> case "THERE_THEIR": return 1; // prefer over GO_TO_HOME
<ide> case "IT_IS_DEPENDING_ON": return 1; // prefer over PROGRESSIVE_VERBS
<ide> case "IRREGARDLESS": return 1; // prefer over spell checker
<del> case "FOR_NOUN_SAKE": return 6; // prefer over PROFANITY (e.g. "for fuck sake")
<del> case "RUDE_SARCASTIC": return 6; // prefer over spell checker
<del> case "CHILDISH_LANGUAGE": return 8; // prefer over spell checker
<del> case "EN_DIACRITICS_REPLACE": return 9; // prefer over spell checker (like PHRASE_REPETITION)
<ide> case "MISSING_GENITIVE": return -1; // prefer over spell checker (like EN_SPECIFIC_CASE)
<ide> case "EN_UNPAIRED_BRACKETS": return -1; // less priority than rules that suggest the correct brackets
<ide> case "NEEDS_FIXED": return -1; // less priority than MISSING_TO_BEFORE_A_VERB
<ide> case "WE_BE": return -1;
<ide> case "A_LOT_OF_NN": return -1;
<ide> case "IT_VBZ": return -1;
<del> case "ORDER_OF_WORDS_WITH_NOT": return -1; // less prio than punctuation rules
<add> case "ORDER_OF_WORDS_WITH_NOT": return -1; // less prio than punctuation rules
<ide> case "ADVERB_WORD_ORDER_10_TEMP": return 1;
<ide> case "ADVERB_WORD_ORDER": return -1; // less prio than PRP_PAST_PART //
<ide> case "IT_IS_2": return -1; // needs higher prio than BEEN_PART_AGREEMENT
<ide> case "BE_I_BE_GERUND": return -1; // prefer other more specific rules (with suggestions)
<ide> case "VBZ_VBD": return -1; // prefer other more specific rules (e.g. IS_WAS)
<ide> case "SUPERLATIVE_THAN": return -1; // prefer other more specific rules
<del> case "UNLIKELY_OPENING_PUNCTUATION": return -1; // prefer other more specific rules
<add> case "UNLIKELY_OPENING_PUNCTUATION": return -1; // prefer other more specific rules
<ide> case "METRIC_UNITS_EN_IMPERIAL": return -1; // prefer MILE_HYPHEN
<ide> case "METRIC_UNITS_EN_GB": return -1; // prefer MILE_HYPHEN
<ide> case "COMMA_COMPOUND_SENTENCE": return -1; // prefer other rules
<ide> case "MORFOLOGIK_RULE_EN_NZ": return -10; // more specific rules (e.g. L2 rules) have priority
<ide> case "MORFOLOGIK_RULE_EN_AU": return -10; // more specific rules (e.g. L2 rules) have priority
<ide> case "TWO_CONNECTED_MODAL_VERBS": return -15;
<del> case "WANT_TO_NN": return -25; // prefer more specific rules that give a suggestion
<del> case "QUESTION_WITHOUT_VERB": return -25; // prefer more specific rules that give a suggestion
<del> case "SENTENCE_FRAGMENT": return -50; // prefer other more important sentence start corrections.
<add> case "WANT_TO_NN": return -25; // prefer more specific rules that give a suggestion
<add> case "QUESTION_WITHOUT_VERB": return -25; // prefer more specific rules that give a suggestion
<add> case "SENTENCE_FRAGMENT": return -50; // prefer other more important sentence start corrections.
<ide> case "SENTENCE_FRAGMENT_SINGLE_WORDS": return -51; // prefer other more important sentence start corrections.
<ide> case "EN_REDUNDANCY_REPLACE": return -510; // style rules should always have the lowest priority.
<ide> case "EN_PLAIN_ENGLISH_REPLACE": return -511; // style rules should always have the lowest priority.
Java
apache-2.0
6bba261e6630dca41de778a9aaea510ab2af84a7
0
akjava/gwt-three.js-test,akjava/gwt-three.js-test,akjava/gwt-three.js-test
/* * gwt-wrap three.js * * Copyright (c) 2013 [email protected] Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. based Three.js r63 https://github.com/mrdoob/three.js The MIT License Copyright (c) 2010-2013 three.js Authors. All rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package com.akjava.gwt.three.client.js.core; import com.akjava.gwt.three.client.gwt.core.Offset; import com.akjava.gwt.three.client.js.math.Box3; import com.akjava.gwt.three.client.js.math.Sphere; import com.akjava.gwt.three.client.js.math.Vector3; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.core.client.JsArray; import com.google.gwt.typedarrays.shared.ArrayBuffer; public class BufferGeometry extends EventDispatcher{ protected BufferGeometry() { } public final native BufferAttribute getIndex()/*-{ return this.index; }-*/; public final native void setIndex(BufferAttribute index)/*-{ this.index = index; }-*/; //TODO make return class public final native JsArray<JavaScriptObject> computeOffsets(int indexBufferSize)/*-{ return this.computeOffsets(indexBufferSize); }-*/; public final native void reorderBuffers(ArrayBuffer indexBuffer,ArrayBuffer indexMap,int vertexCount)/*-{ this.reorderBuffers(indexBuffer,indexMap,vertexCount); }-*/; public final native JavaScriptObject getAttributes()/*-{ return this.attributes; }-*/; public final native void setAttributes(JavaScriptObject attributes)/*-{ this.attributes = attributes; }-*/; //TODO support settings public final native BufferGeometry fromGeometry(Geometry geometry, JavaScriptObject settings)/*-{ return this.fromGeometry(geometry,settings); }-*/; public final native JsArray<Offset> getOffsets()/*-{ return this.offsets; }-*/; public final native void setOffsets(JsArray<Offset> offsets)/*-{ this.offsets = offsets; }-*/; public final native Box3 getBoundingBox()/*-{ return this.boundingBox; }-*/; public final native void setBoundingBox(Box3 boundingBox)/*-{ this.boundingBox = boundingBox; }-*/; public final native Sphere getBoundingSphere()/*-{ return this.boundingSphere; }-*/; public final native void setBoundingSphere(Sphere boundingSphere)/*-{ this.boundingSphere = boundingSphere; }-*/; public final native boolean isHasTangents()/*-{ return this.hasTangents; }-*/; public final native void setHasTangents(boolean hasTangents)/*-{ this.hasTangents = hasTangents; }-*/; /** * i have no idea what is it type? * @return */ public final native JsArray getMorphTargets()/*-{ return this.morphTargets; }-*/; /** * i have no idea what is it type? 
* @return */ public final native void setMorphTargets(JsArray morphTargets)/*-{ this.morphTargets = morphTargets; }-*/; /** * @deprecated * it's gone */ public final native void addAttribute(Object name,Object type,Object numItems,Object itemSize)/*-{ this.addAttribute(name,type,numItems,itemSize); }-*/; /** * @deprecated on r72 * @param start * @param count * @param indexOffset */ public final native void addDrawCall(int start,int count,int indexOffset)/*-{ this.addDrawCall(start,count,indexOffset); }-*/; public final native void addAttribute(String name,BufferAttribute attribute)/*-{ this.addAttribute(name,attribute); }-*/; public final native BufferAttribute getAttribute(String name)/*-{ return this.getAttribute(name); }-*/; public final native void applyMatrix(Object matrix)/*-{ this.applyMatrix(matrix); }-*/; public final native void computeBoundingBox()/*-{ this.computeBoundingBox(); }-*/; public final native Sphere computeBoundingSphere()/*-{ return this.computeBoundingSphere(); }-*/; public final native void computeVertexNormals()/*-{ this.computeVertexNormals(); }-*/; public final native void normalizeNormals()/*-{ this.normalizeNormals(); }-*/; /** * @deprecated on r72 */ public final native void computeTangents()/*-{ this.computeTangents(); }-*/; public final native Vector3 center()/*-{ return this.center(); }-*/; public final native void dispose()/*-{ this.dispose(); }-*/; public native final BufferGeometry merge(BufferGeometry geo,int offset)/*-{ return this.merge(geo,offset); }-*/; public final native BufferGeometry copy(BufferGeometry source)/*-{ return this.copy(source); }-*/; public final native BufferGeometry clone()/*-{ return this.clone(); }-*/; public final native void addGroup(int start,int count,int materialIndex)/*-{ this.addGroup(start,count,materialIndex); }-*/; public final native void clearGroups()/*-{ this.clearGroups(); }-*/; public final native void setDrawRange(int start,int count)/*-{ this.setDrawRange(start,count); }-*/; public final native void rotateX(double angle)/*-{ this.rotateX(angle); }-*/; public final native void rotateY(double angle)/*-{ this.rotateY(angle); }-*/; public final native void rotateZ(double angle)/*-{ this.rotateZ(angle); }-*/; public final native void translate(double x,double y,double z)/*-{ this.translate(x,y,z); }-*/; public final native void scale(double x,double y,double z)/*-{ this.scale(x,y,z); }-*/; public final native void lookAt(Vector3 vector)/*-{ this.lookAt(vector); }-*/; }
src/com/akjava/gwt/three/client/js/core/BufferGeometry.java
/* * gwt-wrap three.js * * Copyright (c) 2013 [email protected] Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. based Three.js r63 https://github.com/mrdoob/three.js The MIT License Copyright (c) 2010-2013 three.js Authors. All rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package com.akjava.gwt.three.client.js.core; import com.akjava.gwt.three.client.gwt.core.Offset; import com.akjava.gwt.three.client.js.math.Box3; import com.akjava.gwt.three.client.js.math.Sphere; import com.akjava.gwt.three.client.js.math.Vector3; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.core.client.JsArray; import com.google.gwt.typedarrays.shared.ArrayBuffer; public class BufferGeometry extends EventDispatcher{ protected BufferGeometry() { } //TODO make return class public final native JsArray<JavaScriptObject> computeOffsets(int indexBufferSize)/*-{ return this.computeOffsets(indexBufferSize); }-*/; public final native void reorderBuffers(ArrayBuffer indexBuffer,ArrayBuffer indexMap,int vertexCount)/*-{ this.reorderBuffers(indexBuffer,indexMap,vertexCount); }-*/; public final native JavaScriptObject getAttributes()/*-{ return this.attributes; }-*/; public final native void setAttributes(JavaScriptObject attributes)/*-{ this.attributes = attributes; }-*/; //TODO support settings public final native BufferGeometry fromGeometry(Geometry geometry, JavaScriptObject settings)/*-{ return this.fromGeometry(geometry,settings); }-*/; public final native JsArray<Offset> getOffsets()/*-{ return this.offsets; }-*/; public final native void setOffsets(JsArray<Offset> offsets)/*-{ this.offsets = offsets; }-*/; public final native Box3 getBoundingBox()/*-{ return this.boundingBox; }-*/; public final native void setBoundingBox(Box3 boundingBox)/*-{ this.boundingBox = boundingBox; }-*/; public final native Sphere getBoundingSphere()/*-{ return this.boundingSphere; }-*/; public final native void setBoundingSphere(Sphere boundingSphere)/*-{ this.boundingSphere = boundingSphere; }-*/; public final native boolean isHasTangents()/*-{ return this.hasTangents; }-*/; public final native void setHasTangents(boolean hasTangents)/*-{ this.hasTangents = hasTangents; }-*/; /** * i have no idea what is it type? * @return */ public final native JsArray getMorphTargets()/*-{ return this.morphTargets; }-*/; /** * i have no idea what is it type? 
* @return */ public final native void setMorphTargets(JsArray morphTargets)/*-{ this.morphTargets = morphTargets; }-*/; /** * @deprecated * it's gone */ public final native void addAttribute(Object name,Object type,Object numItems,Object itemSize)/*-{ this.addAttribute(name,type,numItems,itemSize); }-*/; public final native void addDrawCall(int start,int count,int indexOffset)/*-{ this.addDrawCall(start,count,indexOffset); }-*/; public final native void addAttribute(String name,BufferAttribute attribute)/*-{ this.addAttribute(name,attribute); }-*/; public final native BufferAttribute getAttribute(String name)/*-{ return this.getAttribute(name); }-*/; public final native void applyMatrix(Object matrix)/*-{ this.applyMatrix(matrix); }-*/; public final native void computeBoundingBox()/*-{ this.computeBoundingBox(); }-*/; public final native Sphere computeBoundingSphere()/*-{ return this.computeBoundingSphere(); }-*/; public final native void computeVertexNormals()/*-{ this.computeVertexNormals(); }-*/; public final native void normalizeNormals()/*-{ this.normalizeNormals(); }-*/; public final native void computeTangents()/*-{ this.computeTangents(); }-*/; public final native Vector3 center()/*-{ return this.center(); }-*/; public final native void dispose()/*-{ this.dispose(); }-*/; public native final BufferGeometry merge(BufferGeometry geo,int offset)/*-{ return this.merge(geo,offset); }-*/; public final native BufferGeometry copy(BufferGeometry source)/*-{ return this.copy(source); }-*/; public final native BufferGeometry clone()/*-{ return this.clone(); }-*/; }
for r72 modify BufferGeometry
src/com/akjava/gwt/three/client/js/core/BufferGeometry.java
for r72 modify BufferGeometry
<ide><path>rc/com/akjava/gwt/three/client/js/core/BufferGeometry.java
<ide> }
<ide>
<ide>
<add> public final native BufferAttribute getIndex()/*-{
<add> return this.index;
<add> }-*/;
<add>
<add> public final native void setIndex(BufferAttribute index)/*-{
<add> this.index = index;
<add> }-*/;
<ide>
<ide> //TODO make return class
<ide> public final native JsArray<JavaScriptObject> computeOffsets(int indexBufferSize)/*-{
<ide> this.addAttribute(name,type,numItems,itemSize);
<ide> }-*/;
<ide>
<add>/**
<add> * @deprecated on r72
<add> * @param start
<add> * @param count
<add> * @param indexOffset
<add> */
<ide> public final native void addDrawCall(int start,int count,int indexOffset)/*-{
<ide> this.addDrawCall(start,count,indexOffset);
<ide> }-*/;
<ide> this.normalizeNormals();
<ide> }-*/;
<ide>
<add>/**
<add> * @deprecated on r72
<add> */
<ide> public final native void computeTangents()/*-{
<ide> this.computeTangents();
<ide> }-*/;
<ide> public final native BufferGeometry clone()/*-{
<ide> return this.clone();
<ide> }-*/;
<add>
<add>public final native void addGroup(int start,int count,int materialIndex)/*-{
<add>this.addGroup(start,count,materialIndex);
<add>}-*/;
<add>
<add>public final native void clearGroups()/*-{
<add>this.clearGroups();
<add>}-*/;
<add>
<add>public final native void setDrawRange(int start,int count)/*-{
<add>this.setDrawRange(start,count);
<add>}-*/;
<add>
<add>public final native void rotateX(double angle)/*-{
<add>this.rotateX(angle);
<add>}-*/;
<add>
<add>public final native void rotateY(double angle)/*-{
<add>this.rotateY(angle);
<add>}-*/;
<add>
<add>public final native void rotateZ(double angle)/*-{
<add>this.rotateZ(angle);
<add>}-*/;
<add>
<add>public final native void translate(double x,double y,double z)/*-{
<add>this.translate(x,y,z);
<add>}-*/;
<add>
<add>public final native void scale(double x,double y,double z)/*-{
<add>this.scale(x,y,z);
<add>}-*/;
<add>
<add>public final native void lookAt(Vector3 vector)/*-{
<add>this.lookAt(vector);
<add>}-*/;
<ide> }
Java
bsd-2-clause
631da1f482a472480c18fbd30cc62ee1af7b8b98
0
imagejan/imagej-updater
/* * #%L * ImageJ software for multidimensional image processing and analysis. * %% * Copyright (C) 2009 - 2014 Board of Regents of the University of * Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck * Institute of Molecular Cell Biology and Genetics. * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * #L% */ package net.imagej.updater; import static net.imagej.updater.FilesCollection.DEFAULT_UPDATE_SITE; import static net.imagej.updater.UpdaterTestUtils.addUpdateSite; import static net.imagej.updater.UpdaterTestUtils.assertStatus; import static net.imagej.updater.UpdaterTestUtils.cleanup; import static net.imagej.updater.UpdaterTestUtils.initialize; import static net.imagej.updater.UpdaterTestUtils.main; import static net.imagej.updater.UpdaterTestUtils.writeFile; import static net.imagej.updater.UpdaterTestUtils.writeGZippedFile; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.scijava.util.FileUtils.deleteRecursively; import java.io.File; import java.util.HashSet; import java.util.Set; import net.imagej.updater.FileObject; import net.imagej.updater.FilesCollection; import net.imagej.updater.FileObject.Status; import net.imagej.updater.util.StderrProgress; import org.junit.After; import org.junit.Test; /** * Tests the command-line updater. * * @author Johannes Schindelin */ public class CommandLineUpdaterTest { protected FilesCollection files; protected StderrProgress progress = new StderrProgress(); @After public void after() { if (files != null) cleanup(files); } @Test public void testUploadCompleteSite() throws Exception { final String to_remove = "macros/to_remove.ijm"; final String modified = "macros/modified.ijm"; final String installed = "macros/installed.ijm"; final String new_file = "macros/new_file.ijm"; files = initialize(to_remove, modified, installed); File ijRoot = files.prefix(""); writeFile(new File(ijRoot, modified), "Zing! 
Zing a zong!"); writeFile(new File(ijRoot, new_file), "Aitch!"); assertTrue(new File(ijRoot, to_remove).delete()); files = main(files, "upload-complete-site", FilesCollection.DEFAULT_UPDATE_SITE); assertStatus(Status.OBSOLETE_UNINSTALLED, files, to_remove); assertStatus(Status.INSTALLED, files, modified); assertStatus(Status.INSTALLED, files, installed); assertStatus(Status.INSTALLED, files, new_file); } @Test public void testUpload() throws Exception { files = initialize(); final String path = "macros/test.ijm"; final File file = files.prefix(path); writeFile(file, "// test"); files = main(files, "upload", "--update-site", DEFAULT_UPDATE_SITE, path); assertStatus(Status.INSTALLED, files, path); assertTrue(file.delete()); files = main(files, "upload", path); assertStatus(Status.OBSOLETE_UNINSTALLED, files, path); } @Test public void testUploadCompleteSiteWithShadow() throws Exception { final String path = "macros/test.ijm"; final String obsolete = "macros/obsolete.ijm"; files = initialize(path, obsolete); assertTrue(files.prefix(obsolete).delete()); files = main(files, "upload", obsolete); final File tmp = addUpdateSite(files, "second"); writeFile(files.prefix(path), "// shadowing"); writeFile(files.prefix(obsolete), obsolete); files = main(files, "upload-complete-site", "--force-shadow", "second"); assertStatus(Status.INSTALLED, files, path); assertStatus(Status.INSTALLED, files, obsolete); files = main(files, "remove-update-site", "second"); assertStatus(Status.MODIFIED, files, path); assertStatus(Status.OBSOLETE, files, obsolete); assertTrue(deleteRecursively(tmp)); } @Test public void testForcedShadow() throws Exception { final String path = "macros/test.ijm"; files = initialize(path); final File tmp = addUpdateSite(files, "second"); files = main(files, "upload", "--update-site", "second", "--force-shadow", path); final File onSecondSite = new File(tmp, path + "-" + files.get(path).current.timestamp); assertTrue("File exists: " + onSecondSite, onSecondSite.exists()); } @Test public void testUploadCompleteSiteWithPlatforms() throws Exception { final String macro = "macros/macro.ijm"; final String linux32 = "lib/linux32/libtest.so"; files = initialize(macro, linux32); assertPlatforms(files.get(linux32), "linux32"); File ijRoot = files.prefix(""); final String win64 = "lib/win64/test.dll"; assertTrue(new File(ijRoot, linux32).delete()); writeFile(new File(ijRoot, win64), "Dummy"); writeFile(new File(ijRoot, macro), "New version"); files = main(files, "upload-complete-site", "--platforms", "win64", FilesCollection.DEFAULT_UPDATE_SITE); assertStatus(Status.NOT_INSTALLED, files, linux32); assertStatus(Status.INSTALLED, files, win64); assertStatus(Status.INSTALLED, files, macro); files = main(files, "upload-complete-site", "--platforms", "all", FilesCollection.DEFAULT_UPDATE_SITE); assertStatus(Status.OBSOLETE_UNINSTALLED, files, linux32); } @Test public void testRemoveFile() throws Exception { final String macro = "macros/macro.ijm"; files = initialize(macro); assertTrue(files.prefix(files.get(macro)).delete()); files = main(files, "upload", macro); assertStatus(Status.OBSOLETE_UNINSTALLED, files, macro); } private void assertPlatforms(final FileObject file, final String... 
platforms) { final Set<String> filePlatforms = new HashSet<String>(); for (final String platform : file.getPlatforms()) filePlatforms.add(platform); assertEquals(platforms.length, filePlatforms.size()); for (final String platform : platforms) { assertTrue(file.getFilename(true) + "'s platforms should contain " + platform + " (" + filePlatforms + ")", filePlatforms.contains(platform)); } } @Test public void testCircularDependenciesInOtherSite() throws Exception { files = initialize(); final File second = addUpdateSite(files, "second"); final File third = addUpdateSite(files, "third"); // fake circular dependencies writeGZippedFile(second, "db.xml.gz", "<pluginRecords>" + "<plugin filename=\"jars/a.jar\">" + "<version checksum=\"1\" timestamp=\"2\" filesize=\"3\" />" + "<dependency filename=\"jars/b.jar\" timestamp=\"2\" />" + "</plugin>" + "<plugin filename=\"jars/b.jar\">" + "<version checksum=\"4\" timestamp=\"2\" filesize=\"5\" />" + "<dependency filename=\"jars/a.jar\" timestamp=\"2\" />" + "</plugin>" + "</pluginRecords>"); writeFile(files, "macros/a.ijm"); files = main(files, "upload", "--update-site", "third", "macros/a.ijm"); final File uploaded = new File(third, "macros/a.ijm-" + files.get("macros/a.ijm").current.timestamp); assertTrue(uploaded.exists()); // make sure that circular dependencies are still reported when uploading to the site writeFile(files, "macros/b.ijm"); try { files = main(files, "upload", "--update-site", "second", "macros/b.ijm"); assertTrue("Circular dependency not reported!", false); } catch (RuntimeException e) { assertEquals("Circular dependency detected: jars/b.jar -> jars/a.jar -> jars/b.jar\n", e.getMessage()); } } @Test public void testUploadUnchangedNewVersion() throws Exception { final String path1 = "jars/test-1.0.0.jar"; final String path2 = "jars/test-1.0.1.jar"; files = initialize(path1); files = main(files, "upload", "--update-site", DEFAULT_UPDATE_SITE, path1); assertStatus(Status.INSTALLED, files, path1); final File file1 = files.prefix(path1); final File file2 = files.prefix(path2); assertTrue(file1.renameTo(file2)); files = main(files, "upload", path2); assertFalse(file1.exists()); assertTrue(file2.delete()); files = main(files, "update-force-pristine"); assertEquals(path2, files.get(path2).filename); assertStatus(Status.INSTALLED, files, path2); assertFalse(file1.exists()); assertTrue(file2.exists()); } }
src/test/java/net/imagej/updater/CommandLineUpdaterTest.java
/* * #%L * ImageJ software for multidimensional image processing and analysis. * %% * Copyright (C) 2009 - 2014 Board of Regents of the University of * Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck * Institute of Molecular Cell Biology and Genetics. * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * #L% */ package net.imagej.updater; import static net.imagej.updater.FilesCollection.DEFAULT_UPDATE_SITE; import static net.imagej.updater.UpdaterTestUtils.addUpdateSite; import static net.imagej.updater.UpdaterTestUtils.assertStatus; import static net.imagej.updater.UpdaterTestUtils.cleanup; import static net.imagej.updater.UpdaterTestUtils.initialize; import static net.imagej.updater.UpdaterTestUtils.main; import static net.imagej.updater.UpdaterTestUtils.writeFile; import static net.imagej.updater.UpdaterTestUtils.writeGZippedFile; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.scijava.util.FileUtils.deleteRecursively; import java.io.File; import java.util.HashSet; import java.util.Set; import net.imagej.updater.FileObject; import net.imagej.updater.FilesCollection; import net.imagej.updater.FileObject.Status; import net.imagej.updater.util.StderrProgress; import org.junit.After; import org.junit.Test; /** * Tests the command-line updater. * * @author Johannes Schindelin */ public class CommandLineUpdaterTest { protected FilesCollection files; protected StderrProgress progress = new StderrProgress(); @After public void after() { if (files != null) cleanup(files); } @Test public void testUploadCompleteSite() throws Exception { final String to_remove = "macros/to_remove.ijm"; final String modified = "macros/modified.ijm"; final String installed = "macros/installed.ijm"; final String new_file = "macros/new_file.ijm"; files = initialize(to_remove, modified, installed); File ijRoot = files.prefix(""); writeFile(new File(ijRoot, modified), "Zing! 
Zing a zong!"); writeFile(new File(ijRoot, new_file), "Aitch!"); assertTrue(new File(ijRoot, to_remove).delete()); files = main(files, "upload-complete-site", FilesCollection.DEFAULT_UPDATE_SITE); assertStatus(Status.OBSOLETE_UNINSTALLED, files, to_remove); assertStatus(Status.INSTALLED, files, modified); assertStatus(Status.INSTALLED, files, installed); assertStatus(Status.INSTALLED, files, new_file); } @Test public void testUpload() throws Exception { files = initialize(); final String path = "macros/test.ijm"; final File file = files.prefix(path); writeFile(file, "// test"); files = main(files, "upload", "--update-site", DEFAULT_UPDATE_SITE, path); assertStatus(Status.INSTALLED, files, path); assertTrue(file.delete()); files = main(files, "upload", path); assertStatus(Status.OBSOLETE_UNINSTALLED, files, path); } @Test public void testUploadCompleteSiteWithShadow() throws Exception { final String path = "macros/test.ijm"; final String obsolete = "macros/obsolete.ijm"; files = initialize(path, obsolete); assertTrue(files.prefix(obsolete).delete()); files = main(files, "upload", obsolete); final File tmp = addUpdateSite(files, "second"); writeFile(files.prefix(path), "// shadowing"); writeFile(files.prefix(obsolete), obsolete); files = main(files, "upload-complete-site", "--force-shadow", "second"); assertStatus(Status.INSTALLED, files, path); assertStatus(Status.INSTALLED, files, obsolete); files = main(files, "remove-update-site", "second"); assertStatus(Status.MODIFIED, files, path); assertStatus(Status.OBSOLETE, files, obsolete); assertTrue(deleteRecursively(tmp)); } @Test public void testForcedShadow() throws Exception { final String path = "macros/test.ijm"; files = initialize(path); final File tmp = addUpdateSite(files, "second"); files = main(files, "upload", "--update-site", "second", "--force-shadow", path); final File onSecondSite = new File(tmp, path + "-" + files.get(path).current.timestamp); assertTrue("File exists: " + onSecondSite, onSecondSite.exists()); } @Test public void testUploadCompleteSiteWithPlatforms() throws Exception { final String macro = "macros/macro.ijm"; final String linux32 = "lib/linux32/libtest.so"; files = initialize(macro, linux32); assertPlatforms(files.get(linux32), "linux32"); File ijRoot = files.prefix(""); final String win64 = "lib/win64/test.dll"; assertTrue(new File(ijRoot, linux32).delete()); writeFile(new File(ijRoot, win64), "Dummy"); writeFile(new File(ijRoot, macro), "New version"); files = main(files, "upload-complete-site", "--platforms", "win64", FilesCollection.DEFAULT_UPDATE_SITE); assertStatus(Status.NOT_INSTALLED, files, linux32); assertStatus(Status.INSTALLED, files, win64); assertStatus(Status.INSTALLED, files, macro); files = main(files, "upload-complete-site", "--platforms", "all", FilesCollection.DEFAULT_UPDATE_SITE); assertStatus(Status.OBSOLETE_UNINSTALLED, files, linux32); } @Test public void testRemoveFile() throws Exception { final String macro = "macros/macro.ijm"; files = initialize(macro); assertTrue(files.prefix(files.get(macro)).delete()); files = main(files, "upload", macro); assertStatus(Status.OBSOLETE_UNINSTALLED, files, macro); } private void assertPlatforms(final FileObject file, final String... 
platforms) { final Set<String> filePlatforms = new HashSet<String>(); for (final String platform : file.getPlatforms()) filePlatforms.add(platform); assertEquals(platforms.length, filePlatforms.size()); for (final String platform : platforms) { assertTrue(file.getFilename(true) + "'s platforms should contain " + platform + " (" + filePlatforms + ")", filePlatforms.contains(platform)); } } @Test public void testCircularDependenciesInOtherSite() throws Exception { files = initialize(); final File second = addUpdateSite(files, "second"); final File third = addUpdateSite(files, "third"); // fake circular dependencies writeGZippedFile(second, "db.xml.gz", "<pluginRecords>" + "<plugin filename=\"jars/a.jar\">" + "<version checksum=\"1\" timestamp=\"2\" filesize=\"3\" />" + "<dependency filename=\"jars/b.jar\" timestamp=\"2\" />" + "</plugin>" + "<plugin filename=\"jars/b.jar\">" + "<version checksum=\"4\" timestamp=\"2\" filesize=\"5\" />" + "<dependency filename=\"jars/a.jar\" timestamp=\"2\" />" + "</plugin>" + "</pluginRecords>"); writeFile(files, "macros/a.ijm"); files = main(files, "upload", "--update-site", "third", "macros/a.ijm"); final File uploaded = new File(third, "macros/a.ijm-" + files.get("macros/a.ijm").current.timestamp); assertTrue(uploaded.exists()); // make sure that circular dependencies are still reported when uploading to the site writeFile(files, "macros/b.ijm"); try { files = main(files, "upload", "--update-site", "second", "macros/b.ijm"); assertTrue("Circular dependency not reported!", false); } catch (RuntimeException e) { assertEquals("Circular dependency detected: jars/b.jar -> jars/a.jar -> jars/b.jar\n", e.getMessage()); } } }
Add regression test for the upload of bogus new versions

Signed-off-by: Johannes Schindelin <[email protected]>
src/test/java/net/imagej/updater/CommandLineUpdaterTest.java
Add regression test for the upload of bogus new versions
<ide><path>rc/test/java/net/imagej/updater/CommandLineUpdaterTest.java <ide> import static net.imagej.updater.UpdaterTestUtils.writeFile; <ide> import static net.imagej.updater.UpdaterTestUtils.writeGZippedFile; <ide> import static org.junit.Assert.assertEquals; <add>import static org.junit.Assert.assertFalse; <ide> import static org.junit.Assert.assertTrue; <ide> import static org.scijava.util.FileUtils.deleteRecursively; <ide> <ide> e.getMessage()); <ide> } <ide> } <add> <add> @Test <add> public void testUploadUnchangedNewVersion() throws Exception { <add> final String path1 = "jars/test-1.0.0.jar"; <add> final String path2 = "jars/test-1.0.1.jar"; <add> files = initialize(path1); <add> <add> files = main(files, "upload", "--update-site", DEFAULT_UPDATE_SITE, path1); <add> <add> assertStatus(Status.INSTALLED, files, path1); <add> <add> final File file1 = files.prefix(path1); <add> final File file2 = files.prefix(path2); <add> assertTrue(file1.renameTo(file2)); <add> files = main(files, "upload", path2); <add> <add> assertFalse(file1.exists()); <add> assertTrue(file2.delete()); <add> files = main(files, "update-force-pristine"); <add> <add> assertEquals(path2, files.get(path2).filename); <add> assertStatus(Status.INSTALLED, files, path2); <add> assertFalse(file1.exists()); <add> assertTrue(file2.exists()); <add> } <ide> }
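The platform-related test above relies on the assertPlatforms helper, which checks the reported platform set element by element. As an illustrative aside rather than part of the recorded commit, the same check can be phrased as a single whole-set comparison, which makes a failing run report missing and unexpected platforms together. The sketch below uses only the FileObject accessors the test already calls; the helper name is hypothetical and it would need java.util.Arrays in addition to the test's existing imports.

    // Illustrative alternative to the assertPlatforms helper, meant to sit inside
    // CommandLineUpdaterTest; not part of the recorded commit.
    private void assertPlatformsCompact(final FileObject file, final String... platforms) {
        final Set<String> actual = new HashSet<String>();
        for (final String platform : file.getPlatforms()) {
            actual.add(platform);
        }
        // Comparing whole sets reports every mismatch in one assertion message.
        assertEquals(file.getFilename(true) + "'s platforms",
            new HashSet<String>(Arrays.asList(platforms)), actual);
    }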
Java
apache-2.0
c69d4e14cd8dab145ad03b950a682274712d982a
0
langfr/camunda-bpm-platform,camunda/camunda-bpm-platform,camunda/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,langfr/camunda-bpm-platform,langfr/camunda-bpm-platform,langfr/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,falko/camunda-bpm-platform,camunda/camunda-bpm-platform,xasx/camunda-bpm-platform,camunda/camunda-bpm-platform,falko/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,xasx/camunda-bpm-platform,langfr/camunda-bpm-platform,xasx/camunda-bpm-platform,falko/camunda-bpm-platform,xasx/camunda-bpm-platform,camunda/camunda-bpm-platform,camunda/camunda-bpm-platform,langfr/camunda-bpm-platform,xasx/camunda-bpm-platform,xasx/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform,falko/camunda-bpm-platform,falko/camunda-bpm-platform,falko/camunda-bpm-platform,ingorichtsmeier/camunda-bpm-platform
/* * Copyright © 2013-2019 camunda services GmbH and various authors ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.rest.impl; import java.io.ByteArrayInputStream; import java.net.URI; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.ws.rs.HttpMethod; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriInfo; import org.camunda.bpm.engine.ProcessEngine; import org.camunda.bpm.engine.RepositoryService; import org.camunda.bpm.engine.repository.*; import org.camunda.bpm.engine.rest.DeploymentRestService; import org.camunda.bpm.engine.rest.dto.CountResultDto; import org.camunda.bpm.engine.rest.dto.repository.DeploymentDto; import org.camunda.bpm.engine.rest.dto.repository.DeploymentQueryDto; import org.camunda.bpm.engine.rest.dto.repository.DeploymentWithDefinitionsDto; import org.camunda.bpm.engine.rest.dto.repository.ProcessDefinitionDto; import org.camunda.bpm.engine.rest.exception.InvalidRequestException; import org.camunda.bpm.engine.rest.mapper.MultipartFormData; import org.camunda.bpm.engine.rest.mapper.MultipartFormData.FormPart; import org.camunda.bpm.engine.rest.sub.repository.DeploymentResource; import org.camunda.bpm.engine.rest.sub.repository.impl.DeploymentResourceImpl; import com.fasterxml.jackson.databind.ObjectMapper; public class DeploymentRestServiceImpl extends AbstractRestProcessEngineAware implements DeploymentRestService { public final static String DEPLOYMENT_NAME = "deployment-name"; public final static String ENABLE_DUPLICATE_FILTERING = "enable-duplicate-filtering"; public final static String DEPLOY_CHANGED_ONLY = "deploy-changed-only"; public final static String DEPLOYMENT_SOURCE = "deployment-source"; public final static String TENANT_ID = "tenant-id"; protected static final Set<String> RESERVED_KEYWORDS = new HashSet<String>(); static { RESERVED_KEYWORDS.add(DEPLOYMENT_NAME); RESERVED_KEYWORDS.add(ENABLE_DUPLICATE_FILTERING); RESERVED_KEYWORDS.add(DEPLOY_CHANGED_ONLY); RESERVED_KEYWORDS.add(DEPLOYMENT_SOURCE); RESERVED_KEYWORDS.add(TENANT_ID); } public DeploymentRestServiceImpl(String engineName, ObjectMapper objectMapper) { super(engineName, objectMapper); } public DeploymentResource getDeployment(String deploymentId) { return new DeploymentResourceImpl(getProcessEngine().getName(), deploymentId, relativeRootResourcePath, getObjectMapper()); } public List<DeploymentDto> getDeployments(UriInfo uriInfo, Integer firstResult, Integer maxResults) { DeploymentQueryDto queryDto = new DeploymentQueryDto(getObjectMapper(), uriInfo.getQueryParameters()); ProcessEngine engine = getProcessEngine(); DeploymentQuery query = queryDto.toQuery(engine); List<Deployment> matchingDeployments; if (firstResult != null || maxResults != null) { matchingDeployments = executePaginatedQuery(query, firstResult, maxResults); } else { matchingDeployments = query.list(); } List<DeploymentDto> deployments = new ArrayList<DeploymentDto>(); for (Deployment deployment : 
matchingDeployments) { DeploymentDto def = DeploymentDto.fromDeployment(deployment); deployments.add(def); } return deployments; } public DeploymentWithDefinitionsDto createDeployment(UriInfo uriInfo, MultipartFormData payload) { DeploymentBuilder deploymentBuilder = extractDeploymentInformation(payload); if(!deploymentBuilder.getResourceNames().isEmpty()) { DeploymentWithDefinitions deployment = deploymentBuilder.deployWithResult(); DeploymentWithDefinitionsDto deploymentDto = DeploymentWithDefinitionsDto.fromDeployment(deployment); URI uri = uriInfo.getBaseUriBuilder() .path(relativeRootResourcePath) .path(DeploymentRestService.PATH) .path(deployment.getId()) .build(); // GET deploymentDto.addReflexiveLink(uri, HttpMethod.GET, "self"); return deploymentDto; } else { throw new InvalidRequestException(Status.BAD_REQUEST, "No deployment resources contained in the form upload."); } } private DeploymentBuilder extractDeploymentInformation(MultipartFormData payload) { DeploymentBuilder deploymentBuilder = getProcessEngine().getRepositoryService().createDeployment(); Set<String> partNames = payload.getPartNames(); for (String name : partNames) { FormPart part = payload.getNamedPart(name); if (!RESERVED_KEYWORDS.contains(name)) { String fileName = part.getFileName(); if (fileName != null) { deploymentBuilder.addInputStream(part.getFileName(), new ByteArrayInputStream(part.getBinaryContent())); } else { throw new InvalidRequestException(Status.BAD_REQUEST, "No file name found in the deployment resource described by form parameter '" + fileName + "'."); } } } FormPart deploymentName = payload.getNamedPart(DEPLOYMENT_NAME); if (deploymentName != null) { deploymentBuilder.name(deploymentName.getTextContent()); } FormPart deploymentSource = payload.getNamedPart(DEPLOYMENT_SOURCE); if (deploymentSource != null) { deploymentBuilder.source(deploymentSource.getTextContent()); } FormPart deploymentTenantId = payload.getNamedPart(TENANT_ID); if (deploymentTenantId != null) { deploymentBuilder.tenantId(deploymentTenantId.getTextContent()); } extractDuplicateFilteringForDeployment(payload, deploymentBuilder); return deploymentBuilder; } private void extractDuplicateFilteringForDeployment(MultipartFormData payload, DeploymentBuilder deploymentBuilder) { boolean enableDuplicateFiltering = false; boolean deployChangedOnly = false; FormPart deploymentEnableDuplicateFiltering = payload.getNamedPart(ENABLE_DUPLICATE_FILTERING); if (deploymentEnableDuplicateFiltering != null) { enableDuplicateFiltering = Boolean.parseBoolean(deploymentEnableDuplicateFiltering.getTextContent()); } FormPart deploymentDeployChangedOnly = payload.getNamedPart(DEPLOY_CHANGED_ONLY); if (deploymentDeployChangedOnly != null) { deployChangedOnly = Boolean.parseBoolean(deploymentDeployChangedOnly.getTextContent()); } // deployChangedOnly overrides the enableDuplicateFiltering setting if (deployChangedOnly) { deploymentBuilder.enableDuplicateFiltering(true); } else if (enableDuplicateFiltering) { deploymentBuilder.enableDuplicateFiltering(false); } } private List<Deployment> executePaginatedQuery(DeploymentQuery query, Integer firstResult, Integer maxResults) { if (firstResult == null) { firstResult = 0; } if (maxResults == null) { maxResults = Integer.MAX_VALUE; } return query.listPage(firstResult, maxResults); } public CountResultDto getDeploymentsCount(UriInfo uriInfo) { DeploymentQueryDto queryDto = new DeploymentQueryDto(getObjectMapper(), uriInfo.getQueryParameters()); ProcessEngine engine = getProcessEngine(); DeploymentQuery query = 
queryDto.toQuery(engine); long count = query.count(); CountResultDto result = new CountResultDto(); result.setCount(count); return result; } }
engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/impl/DeploymentRestServiceImpl.java
/* * Copyright © 2013-2019 camunda services GmbH and various authors ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.rest.impl; import java.io.ByteArrayInputStream; import java.net.URI; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import javax.ws.rs.HttpMethod; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriInfo; import org.camunda.bpm.engine.ProcessEngine; import org.camunda.bpm.engine.RepositoryService; import org.camunda.bpm.engine.repository.*; import org.camunda.bpm.engine.rest.DeploymentRestService; import org.camunda.bpm.engine.rest.dto.CountResultDto; import org.camunda.bpm.engine.rest.dto.repository.DeploymentDto; import org.camunda.bpm.engine.rest.dto.repository.DeploymentQueryDto; import org.camunda.bpm.engine.rest.dto.repository.DeploymentWithDefinitionsDto; import org.camunda.bpm.engine.rest.dto.repository.ProcessDefinitionDto; import org.camunda.bpm.engine.rest.exception.InvalidRequestException; import org.camunda.bpm.engine.rest.mapper.MultipartFormData; import org.camunda.bpm.engine.rest.mapper.MultipartFormData.FormPart; import org.camunda.bpm.engine.rest.sub.repository.DeploymentResource; import org.camunda.bpm.engine.rest.sub.repository.impl.DeploymentResourceImpl; import com.fasterxml.jackson.databind.ObjectMapper; public class DeploymentRestServiceImpl extends AbstractRestProcessEngineAware implements DeploymentRestService { public final static String DEPLOYMENT_NAME = "deployment-name"; public final static String ENABLE_DUPLICATE_FILTERING = "enable-duplicate-filtering"; public final static String DEPLOY_CHANGED_ONLY = "deploy-changed-only"; public final static String DEPLOYMENT_SOURCE = "deployment-source"; public final static String TENANT_ID = "tenant-id"; protected static final Set<String> RESERVED_KEYWORDS = new HashSet<String>(); static { RESERVED_KEYWORDS.add(DEPLOYMENT_NAME); RESERVED_KEYWORDS.add(ENABLE_DUPLICATE_FILTERING); RESERVED_KEYWORDS.add(DEPLOY_CHANGED_ONLY); RESERVED_KEYWORDS.add(DEPLOYMENT_SOURCE); RESERVED_KEYWORDS.add(TENANT_ID); } public DeploymentRestServiceImpl(String engineName, ObjectMapper objectMapper) { super(engineName, objectMapper); } public DeploymentResource getDeployment(String deploymentId) { return new DeploymentResourceImpl(getProcessEngine().getName(), deploymentId, relativeRootResourcePath, getObjectMapper()); } public List<DeploymentDto> getDeployments(UriInfo uriInfo, Integer firstResult, Integer maxResults) { DeploymentQueryDto queryDto = new DeploymentQueryDto(getObjectMapper(), uriInfo.getQueryParameters()); ProcessEngine engine = getProcessEngine(); DeploymentQuery query = queryDto.toQuery(engine); List<Deployment> matchingDeployments; if (firstResult != null || maxResults != null) { matchingDeployments = executePaginatedQuery(query, firstResult, maxResults); } else { matchingDeployments = query.list(); } List<DeploymentDto> deployments = new ArrayList<DeploymentDto>(); for (Deployment deployment : 
matchingDeployments) { DeploymentDto def = DeploymentDto.fromDeployment(deployment); deployments.add(def); } return deployments; } public DeploymentWithDefinitionsDto createDeployment(UriInfo uriInfo, MultipartFormData payload) { DeploymentBuilder deploymentBuilder = extractDeploymentInformation(payload); if(!deploymentBuilder.getResourceNames().isEmpty()) { DeploymentWithDefinitions deployment = deploymentBuilder.deployWithResult(); DeploymentWithDefinitionsDto deploymentDto = DeploymentWithDefinitionsDto.fromDeployment(deployment); URI uri = uriInfo.getBaseUriBuilder() .path(relativeRootResourcePath) .path(DeploymentRestService.PATH) .path(deployment.getId()) .build(); // GET deploymentDto.addReflexiveLink(uri, HttpMethod.GET, "self"); return deploymentDto; } else { throw new InvalidRequestException(Status.BAD_REQUEST, "No deployment resources contained in the form upload."); } } private DeploymentBuilder extractDeploymentInformation(MultipartFormData payload) { DeploymentBuilder deploymentBuilder = getProcessEngine().getRepositoryService().createDeployment(); Set<String> partNames = payload.getPartNames(); for (String name : partNames) { FormPart part = payload.getNamedPart(name); if (!RESERVED_KEYWORDS.contains(name)) { String fileName = part.getFileName(); if (fileName != null) { deploymentBuilder.addInputStream(part.getFileName(), new ByteArrayInputStream(part.getBinaryContent())); } else { throw new InvalidRequestException(Status.BAD_REQUEST, "No deployment resources found for file name " + name + "."); } } } FormPart deploymentName = payload.getNamedPart(DEPLOYMENT_NAME); if (deploymentName != null) { deploymentBuilder.name(deploymentName.getTextContent()); } FormPart deploymentSource = payload.getNamedPart(DEPLOYMENT_SOURCE); if (deploymentSource != null) { deploymentBuilder.source(deploymentSource.getTextContent()); } FormPart deploymentTenantId = payload.getNamedPart(TENANT_ID); if (deploymentTenantId != null) { deploymentBuilder.tenantId(deploymentTenantId.getTextContent()); } extractDuplicateFilteringForDeployment(payload, deploymentBuilder); return deploymentBuilder; } private void extractDuplicateFilteringForDeployment(MultipartFormData payload, DeploymentBuilder deploymentBuilder) { boolean enableDuplicateFiltering = false; boolean deployChangedOnly = false; FormPart deploymentEnableDuplicateFiltering = payload.getNamedPart(ENABLE_DUPLICATE_FILTERING); if (deploymentEnableDuplicateFiltering != null) { enableDuplicateFiltering = Boolean.parseBoolean(deploymentEnableDuplicateFiltering.getTextContent()); } FormPart deploymentDeployChangedOnly = payload.getNamedPart(DEPLOY_CHANGED_ONLY); if (deploymentDeployChangedOnly != null) { deployChangedOnly = Boolean.parseBoolean(deploymentDeployChangedOnly.getTextContent()); } // deployChangedOnly overrides the enableDuplicateFiltering setting if (deployChangedOnly) { deploymentBuilder.enableDuplicateFiltering(true); } else if (enableDuplicateFiltering) { deploymentBuilder.enableDuplicateFiltering(false); } } private List<Deployment> executePaginatedQuery(DeploymentQuery query, Integer firstResult, Integer maxResults) { if (firstResult == null) { firstResult = 0; } if (maxResults == null) { maxResults = Integer.MAX_VALUE; } return query.listPage(firstResult, maxResults); } public CountResultDto getDeploymentsCount(UriInfo uriInfo) { DeploymentQueryDto queryDto = new DeploymentQueryDto(getObjectMapper(), uriInfo.getQueryParameters()); ProcessEngine engine = getProcessEngine(); DeploymentQuery query = queryDto.toQuery(engine); long count 
= query.count(); CountResultDto result = new CountResultDto(); result.setCount(count); return result; } }
chore(rest): adjust error message

Related to CAM-9546
engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/impl/DeploymentRestServiceImpl.java
chore(rest): adjust error message
<ide><path>ngine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/impl/DeploymentRestServiceImpl.java <ide> if (fileName != null) { <ide> deploymentBuilder.addInputStream(part.getFileName(), new ByteArrayInputStream(part.getBinaryContent())); <ide> } else { <del> throw new InvalidRequestException(Status.BAD_REQUEST, "No deployment resources found for file name " + name + "."); <add> throw new InvalidRequestException(Status.BAD_REQUEST, "No file name found in the deployment resource described by form parameter '" + fileName + "'."); <ide> } <ide> } <ide> }
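For context on the form contract that DeploymentRestServiceImpl enforces above: parts named deployment-name, deployment-source, tenant-id, enable-duplicate-filtering and deploy-changed-only are read as deployment options, and any other part is treated as a deployment resource and must carry a file name, otherwise the adjusted error message from this commit is thrown. The client sketch below is not part of the commit; it assumes Apache HttpClient 4.x with the httpmime module on the classpath and the default /engine-rest/deployment/create endpoint, and only illustrates how such a multipart request can be assembled (resource and deployment names are placeholders).

    import java.io.File;
    import org.apache.http.HttpResponse;
    import org.apache.http.client.methods.HttpPost;
    import org.apache.http.entity.ContentType;
    import org.apache.http.entity.mime.MultipartEntityBuilder;
    import org.apache.http.impl.client.CloseableHttpClient;
    import org.apache.http.impl.client.HttpClients;

    public class DeploymentUploadSketch {
      public static void main(String[] args) throws Exception {
        // Assumed endpoint; adjust host, port and context path to the actual installation.
        HttpPost post = new HttpPost("http://localhost:8080/engine-rest/deployment/create");
        post.setEntity(MultipartEntityBuilder.create()
            // Reserved option parts, matching the constants in DeploymentRestServiceImpl.
            .addTextBody("deployment-name", "invoice-example")
            .addTextBody("deployment-source", "rest-client-sketch")
            .addTextBody("enable-duplicate-filtering", "true")
            // Any other part is taken as a deployment resource and must have a file name,
            // which addBinaryBody supplies as its last argument.
            .addBinaryBody("data", new File("invoice.bpmn"),
                ContentType.APPLICATION_OCTET_STREAM, "invoice.bpmn")
            .build());
        try (CloseableHttpClient client = HttpClients.createDefault()) {
          HttpResponse response = client.execute(post);
          System.out.println(response.getStatusLine());
        }
      }
    }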
JavaScript
mit
2632e5cfabef2ac1d81e515dc176883d8ddf2791
0
JackAdams/meteor-transactions,JackAdams/meteor-transactions,Data-Meister/meteor-transactions,Data-Meister/meteor-transactions
// ******************************* // Transactions for Meteor + Mongo // v0.8.0 // by Brent Abrahams // [email protected] // MIT Licence 2017 // ******************************* // This package adds one new mongo collection to your app // It is exposed to the app via `tx.Transactions`, not via `Transactions` // In much the same way that we have `Meteor.users` rather than `Users` Transactions = new Mongo.Collection(Meteor.settings && Meteor.settings.transactionsCollection || "transactions"); if (Meteor.isServer) { Transactions.allow({ insert: function (userId, doc) { return (_.has(doc, "items") || doc.user_id !== userId) ? false : true; }, update: function (userId, doc, fields, modifier) { if (userId !== doc.user_id) { // TODO -- this condition will need to be modified to allow an admin to look through transactions and undo/redo them from the client // That said, an admin interface shouldn't really be messing with the transactions collection from the client anyway, so ignoring this for now return false; } else { if (tx._validateModifier(modifier, doc._id)) { return true; } else { // Transactions.remove({_id:doc._id}); return false; } } }, remove: function (userId, doc) { var fullDoc = Transactions.findOne({_id: doc._id}); return fullDoc && fullDoc.user_id === userId; } }); } TransactionManager = function () { // This is instantiated as `tx` and maintains a map that points to the correct instance of `Transact` // It also carries all of the globally configurable parts of the `tx` object, which intances of `Transact` will reference // The per-transaction methods that this manager needs to direct to the correct `Transact` instance are: // tx.start // tx.insert // tx.update // tx.remove // tx.commit // tx.cancel // tx.rollback // ******************************************************* // THIS IS THE MAP FROM CONNECTION TO TRANSACTION INSTANCE // ******************************************************* this._txMap = {}; // This ensures that the map contains a `Transact` instance for each connection and also for the default (when there is no connection) this._instance = function () { var instanceKey = 'default'; var currentInvocation = DDP._CurrentInvocation; if (_.isFunction(currentInvocation.get)) { var ci = currentInvocation.get(); if (ci && ci.connection && ci.connection.id) { instanceKey = ci.connection.id; } } var instance = this._txMap[instanceKey]; if (!instance) { this._txMap[instanceKey] = new Transact(); // Let the instance know its own connection_id this._txMap[instanceKey]._connectionId = instanceKey; instance = this._txMap[instanceKey]; } return instance; } // ********************************************************************************************************** // YOU CAN OPTIONALLY OVERWRITE tx.collectionIndex TO MAKE THE TRANSACTIONS WORK FOR CERTAIN COLLECTIONS ONLY // ********************************************************************************************************** // e.g. in a file shared by client and server, write: // tx.collectionIndex = {posts:Posts,comments:Comments, etc...} // where the key is the name of the database collection (e.g. 'posts') and the value is the actual Meteor Mongo.Collection object (e.g. Posts) // by default, all collections are added to tx.collectionIndex on startup in a "Meteor.startup(function (){ Meteor.defer(function () { ..." block // so if you are planning to overwrite tx.collectionIndex, you'll also need to wrap your code in "Meteor.startup(function (){ Meteor.defer(function () { ..." 
block // there's no real need to do this for most applications though this.collectionIndex = {}; // ******************************************************************** // YOU CAN OPTIONALLY OVERWRITE ANY OF THESE (e.g. tx.logging = false;) // ******************************************************************** // Recommendations about _where_ to overwrite the following attributes/methods are given as [BOTH], [SERVER], or [CLIENT] // Turn logging on or off // [BOTH] this.logging = true; // By default, messages are logged to the console // [BOTH] this.log = function () { if (this.logging) { _.each(arguments, function (message) { console.log(message); }) } }; // To show the connection in the logging from `Transact` instances, set `tx.showConnection = true;` // Useful for debugging this.showConnection = false; // Because most/many db writes will come through the transaction manager, this is a good place to do some permission checking // NOTE: this permission check is the only thing standing between a user and the database if a transaction is committed from a method with no surrounding permission checks of its own // NOTE: if you're commiting transactions from the client, you'll definitely need this as writes do not go through your allow and deny rules // [BOTH] this.checkPermission = function (command, collection, doc, modifier) { return true; }; // commands are "insert", "update", "remove" // For the purpose of filtering transactions later, a "context" field is added to each transaction // By default, we don't do anything -- the context is empty, but there are probably certain fields in the document that we could record to use for filtering. // Remember, if there are multiple document being processed by a single transaction, the values from the last document in the queue will overwrite values for fields that have taken a value from a previous document - last write wins // OVERWRITING THIS WITH tx.makeContext = function () { ... } IS STRONGLY RECOMMENDED // [BOTH] this.makeContext = function (command, collection, doc, modifier) { return {}; }; // TODO -- detect routes and add the path to the context automatically (separate, per router, packages) // If requireUser is set to false, any non-logged in user, using the `babrahams:undo-redo` package gets the undo-redo stack of any non-logged-in user // For security purposes this should always be set to true in real apps that use the `babrahams:undo-redo` package // [BOTH] this.requireUser = false; // This function is called on the client when the user tries to undo (or redo) a transaction in which some (or all) documents have been altered by a later transaction // [CLIENT] this.onTransactionExpired = function () { alert('Sorry. 
Other edits have been made, so this action can no longer be reversed.'); }; // If app code forgets to close a transaction on the server, it will autoclose after the following number of milliseconds // If a transaction is open on the client, it just stays open indefinitely // [BOTH] this.idleTimeout = 5000; // If something goes wrong and a transaction isn't closed it remains open (on the client - TODO - make the client time out the same way the server does) // This is also a problem on the server, as a transaction will remain open until the it times out // and meanwhile other transactions can't be started and committed // The following can be set to true to stop this from happening, but it means you have to be strict about coding your logic // so that only one transaction is started at any given time // [BOTH] this.forceCommitBeforeStart = false; // By default, documents are hard deleted and a snapshot of the document the moment before deletion is stored for retrieval in the transaction document // This is much more prone to causing bugs and weirdness in apps, e.g. if a removed doc is restored after documents it depends on have been removed // (and whose removal should normally have caused the restored doc to have also been removed and tagged with the same transaction_id) // It's safer to turn softDelete on for complex apps, but having it off makes this package work out of the box better, as the user doesn't have to // use `,deleted:{$exists:false}` in their `find` and `findOne` selectors to keep deleted docs out of the result // [BOTH] this.softDelete = false; // This flag tells the package how to deal with incomplete transactions, undos and redos on startup // The possible modes are: // `complete` - which will try to complete any pending transactions // `rollback` - which will try to restore db to state prior to the pending transaction // any other value will just leave the db in the state it was left at last shutdown // [SERVER] this.selfRepairMode = 'complete'; // If you want to limit the volume of rubbish in the transactions collection // you can set `tx.removeRolledBackTransactions = true` // It is false by default because having a record in the db helps with debugging // [BOTH] this.removeRolledBackTransactions = false; // Overwrite this with something like `tx.lodash = lodash` if you have a lodash package installed // This gives you access to `tx.mergeContext` and `tx.setContextPathValue` this.lodash = null; // The following function `_inverseUsingSet` is necessary for `inverseOperations` // It is not intended to be a part of the public API // Default inverse operation that uses $set to restore original state of updated fields this._inverseUsingSet = function (collection, existingDoc, updateMap, opt) { var self = this, inverseCommand = '$set', formerValues = {}; _.each(_.keys(updateMap), function (keyName) { // Given a dot delimited string as a key, and an object, find the value var _drillDown = function (obj, key) { return Meteor._get.apply(null, [obj].concat(key.split('.'))); // Previous implementation, which worked fine but was more LOC than necessary /*var pieces = key.split('.'); if (pieces.length > 1) { var newObj = obj ? 
obj[pieces[0]] : {}; pieces.shift(); return this._drillDown(newObj,pieces.join('.')); } else { if (obj) { return obj[key]; } else { return; // undefined } }*/ } var formerVal = _drillDown(existingDoc || {}, keyName); if (typeof formerVal !== 'undefined') { // Restore former value inverseCommand = '$set'; formerValues[keyName] = formerVal; } else { // Field was already unset, so just $unset it again inverseCommand = '$unset'; formerValues[keyName] = 1; } }); return {command: inverseCommand, data: formerValues}; }; // Functions to work out the inverse operation that will reverse a single collection update command. // This default implementation attempts to reverse individual array $set and $addToSet operations with // corresponding $pull operations, but this may not work in some cases, eg: // 1. if a $addToSet operation does nothing because the value is already in the array, the inverse $pull will remove that value from the array, even though // it was there before the $addToSet action // 2. if a $addToSet operation had a $each qualifier (to add multiple values), the default inverse $pull operation will fail because $each is not suported for $pull operations // // You may wish to provide your own implementations that override some of these functions to address these issues if they affect your own application. // For example, store the entire state of the array being mutated by a $set / $addToSet or $pull operation, and then restore it using a inverse $set operation. // The tx.inverse function is called with the tx object as the `this` data context. // [BOTH] this.inverseOperations = { '$set': this._inverseUsingSet, '$addToSet': function (collection, existingDoc, updateMap, opt) { // Inverse of $addToSet is $pull. // TODO -- not really -- if an element is already present, this will pull it // out after an undo, leaving the value changed // This will not work if $addToSet uses modifiers like $each. // In that case, you need to restore state of original array using $set return {command: '$pull', data: updateMap}; }, '$unset': this._inverseUsingSet, '$pull': function (collection, existingDoc, updateMap, opt) { // Inverse of $pull is $addToSet. // TODO -- same problem as $addToSet above, but in reverse return {command: '$addToSet', data: updateMap}; }, '$inc': this._inverseUsingSet, '$push': function (collection, existingDoc, updateMap, opt) { // Inverse of $push is $pull. // This will not work if $push uses modifiers like $each, or if pushed value has duplicates in same array (because $pull will remove all instances of that value). 
// In that case, you need to restore state of original array using $set return {command: '$pull', data: updateMap}; }, }; // ****************************************************************************************************** // THESE ARE METHODS EXTERNAL TO THE PACKAGE THAT ARE CALLED OUTSIDE THE CONTEXT OF A `Transact` INSTANCE // ****************************************************************************************************** this.apiFunctions = [ 'start', 'insert', 'update', 'remove', 'cancel', 'commit', 'rollback', 'purgeIncomplete', 'undo', 'redo', 'setContext', 'mergeContext', 'setContextPathValue', 'getContext', 'transactionStarted' ] // ***************************************************************************************************** // THESE ARE METHODS INTERNAL TO THE PACKAGE BUT ARE CALLED OUTSIDE THE CONTEXT OF A `Transact` INSTANCE // ***************************************************************************************************** this.internalFunctions = [ '_validateModifier', '_repairAllIncomplete', '_userOrNull', '_processTransaction', '_checkTransactionFields', '_unpackageForUpdate', '_changeItemState', '_repairIncomplete' ]; var self = this; _.each(this.apiFunctions.concat(this.internalFunctions), function (fnName) { self[fnName] = function () { var instance = self._instance(); return instance[fnName].apply(instance, arguments); } }); } // ****************************** // Instantiation of the tx object // ****************************** // This (tx) is the object that gets exported for the app to interact with if (typeof tx === 'undefined') { tx = new TransactionManager(); tx.Transactions = Transactions; // Expose the Transactions collection via tx } else { throw new Meteor.Error('`tx` is already defined in the global scope. The babrahams:transactions package won\'t work.'); } // On the server, instances of `Transact` need to be deleted when the connection closes // Keep an eye on server memory -- this may not be enough to prevent a memory leak // , in which case we can take stronger action if (Meteor.isServer) { Meteor.onConnection(function (connection) { if (connection.id) { connection.onClose(function () { delete tx._txMap[connection.id]; }); } }); } Transact = function () { // *************************** // DONT OVERWRITE ANY OF THESE // *************************** // These are transient per-transaction properties that are used internally by the `Transact` instance that is created for the given transaction this._connectionId = null; // Should be immediately overwritten after instantiation this._transaction_id = null; this._autoTransaction = false; this._items = []; this._startAttempts = 0; this._rollback = false; this._rollbackReason = ''; this._autoCancel = null; this._lastTransactionData = null; this._context = {}; this._description = ''; } // ********** // PUBLIC API // ********** // Either use these methods together on the client, or use them together on the server, but don't try mixing the two! /** * Starts a transaction */ Transact.prototype.start = function (description, options) { if (tx.requireUser && !Meteor.userId()) { this.log('User must be logged in to start a transaction.'); this._cleanReset(); return; } this._resetAutoCancel(Meteor.isClient); if (!this._transaction_id) { // Set transaction description if (typeof description === 'undefined') { description = 'last action'; } this._description = description; // Set any transaction options e.g. 
context if (typeof options === 'object') { if ('context' in options) { this._setContext(options['context']); } } this._transaction_id = Random.id(); // Transactions.insert({user_id:Meteor.userId(),timestamp:(ServerTime.date()).getTime(),description:description}); this.log('Started "' + description + '" with transaction_id: ' + this._transaction_id + ((this._autoTransaction) ? ' (auto started)' : '')); return this._transaction_id; } else { this.log('An attempt to start a transaction ("' + description + '") was made when a transaction was already open. Open transaction_id: ' + this._transaction_id); if ((tx.forceCommitBeforeStart && !(options && (options.useExistingTransaction || options.forceCommitBeforeStart === false))) || (options && options.forceCommitBeforeStart)) { // null parameter to force the commit // last parameter starts a new transaction after the commit tx.commit(null, undefined, undefined, {description: description, options: options}); } else { this._startAttempts++; if (!(options && options.useExistingTransaction)) { return false; } else { this.log('Using existing transaction'); return this._transaction_id; } } } } /** * Checks whether a transaction is already started */ Transact.prototype.transactionStarted = function () { return this._transaction_id || null; } /** * Commits all the changes (actions) queued in the current transaction */ Transact.prototype.commit = function (txid, callback, newId, startNewTransaction) { var self = this; if (tx.requireUser && !Meteor.userId()) { self.log('User must be logged in to commit a transaction.'); this._callback(txid, callback, new Meteor.Error('user-required','No user logged in.'), false); return; } this._lastTransactionData = {}; this._lastTransactionData.transaction_id = this._transaction_id; if (!this._transaction_id) { this._cleanReset(); self.log("Commit reset transaction to clean state"); this._callback(txid, callback, new Meteor.Error('no-transactions-open', 'No transaction open.'), false); return; } // If a transaction is committed and either null is passed or the exact transaction_id value // then we force commit, no matter how many start attempts there have been // otherwise, if there are startAttempts recorded (i.e. tried to start a transaction explicitly while another one was open) // reduce the number of startAttempts by one, fire the callback with false and return if ((_.isString(txid) && txid === this._transaction_id) || txid === null) { // Force commit now self.log("Forced commit"); } else if (this._startAttempts > 0) { this._startAttempts--; this._callback(txid, callback, new Meteor.Error('multiple-transactions-open', 'More than one transaction open. Closing one now to leave ' + this._startAttempts + ' transactions open.'), false); return; } /*if (!_.isFunction(txid) && typeof txid !== 'undefined' && txid !== this._transaction_id && _.isString(txid)) { if (txid === null) { tx.log("Forced commit"); } else { this._startAttempts--; this._callback(txid, callback, new Meteor.Error('multiple-transactions-open', 'More than one transaction open. Closing one now to leave ' + this._startAttempts + ' transactions open.'), false); return; } } if (this._startAttempts > 0 && !(!_.isFunction(txid) && typeof txid !== 'undefined' && (txid === this._transaction_id || txid === null))) { this._startAttempts--; this._callback(txid, callback, new Meteor.Error('multiple-transactions-open', 'More than one transaction open. 
Closing one now to leave ' + this._startAttempts + ' transactions open.'), false); return; }*/ if (_.isEmpty(this._items)) { // Don't record the transaction if nothing happened // Transactions.remove({_id:this._transaction_id}); self.log('Empty transaction: ' + this._transaction_id); } if (this._rollback) { // One or more permissions failed or the transaction was cancelled, don't process the execution stack var error = this._rollbackReason; var errorDescription = ''; switch (this._rollbackReason) { case 'permission-denied' : errorDescription = 'One or more permissions were denied, so transaction was rolled back.'; break; case 'transaction-cancelled' : errorDescription = 'The transaction was cancelled programatically, so it was rolled back.'; break; default : errorDescription = 'An error occurred when processing an action.'; suppressError = false; break; } this.rollback(); this._callback(txid, callback, new Meteor.Error(error, errorDescription), false); return; } else { self.log('Beginning commit with transaction_id: ' + this._transaction_id); var doRollback = function (err) { self.log("Rolling back changes"); self.rollback(); self._callback(txid, callback, new Meteor.Error('error', 'An error occurred, so transaction was rolled back.', err), false); } try { var runCallback = function (res) { if (!self._lastTransactionData) { self._lastTransactionData = {}; } self._lastTransactionData.transaction_id = self._transaction_id; self._lastTransactionData.writes = res.items; var newIds = _.reduce(res.items, function (memo, item) { if (item.action === 'insert') { if (typeof memo[item.collection] === "undefined") { memo[item.collection] = []; } memo[item.collection].push(item._id); } return memo; }, {}); self._cleanReset(); self.log("Commit reset transaction manager to clean state"); self._callback(txid, callback, null, newIds || true); if (_.isObject(startNewTransaction)) { self.start(startNewTransaction.description, startNewTransaction.options); } } // Okay, this is it -- we're really going to process the queue // So no need to time out the transaction if the processing takes a while // This will be async in the client and syncronous on the server if (Meteor.isServer) { Meteor.clearTimeout(this._autoCancel); var cannotOverridePermissionCheck = false; try { var result = self._processTransaction(this._transaction_id, this._description, this._items, this._context, cannotOverridePermissionCheck); } catch (err) { // self.log(err); } if (!result) { self._callback(txid, callback, new Meteor.Error('error', 'An error occurred, so transaction was rolled back.'), false); return; } runCallback(result); } else { clearTimeout(this._autoCancel); Meteor.call("_meteorTransactionsProcess", this._transaction_id, this._description, this._items, this._context, function (err, res) { if (err || !res) { if (err) { // self.log(err); } self._callback(txid, callback, new Meteor.Error('error','An error occurred, so transaction was rolled back.', err), false); return; } else { // Just in case Meteor.is[SomethingElse] that isn't server but isn't client either if (Meteor.isClient) { Tracker.flush(); // Not sure about this } runCallback(res); } }); } /*Transactions.update({_id:this._transaction_id}, {$set:_.extend({context:this._context}, {items:this._items})});*/ } catch (err) { self.log(err); doRollback(err); return; } } return true; // A flag that at least the call was made // Only the callback from the _meteorTransactionsProcess will really be able to tell us the result of this call } /** * Allows programmatic call of a 
rollback */ Transact.prototype.rollback = function (rollbackAllDoneItems) { // Need to undo all the instant stuff that's been done // TODO -- this is pretty half-baked -- we should be checking that actions are actually completed before continuing -- not just watching for errors // Eventually, this should be rolled into a single universal function // that iterates over the items array and makes db writes // TODO -- this should probably be run as a method var self = this; var items = this._items.reverse(); var error = false; if (Meteor.isClient) { // Don't let people mess with this from the client // Only the server can call tx.rollback(true); rollbackAllDoneItems = false; } _.each(items, function (obj, index) { if (obj.action === "remove") { if ((obj.instant || rollbackAllDoneItems) && obj.state === 'done') { try { if (obj.doc) { // This was removed from the collection, we need to reinsert it tx.collectionIndex[obj.collection].insert(obj.doc); } else { // This was soft deleted, we need to remove the deleted field tx.collectionIndex[obj.collection].update({_id: obj._id}, {$unset: {deleted: 1, transaction_id: self._transaction_id}}); } self.log('Rolled back remove'); } catch (err) { self.log(err); error = true; } } } if (obj.action === "update") { if (((obj.instant || rollbackAllDoneItems) && obj.state === 'done') && typeof obj.inverse !== 'undefined' && obj.inverse.command && obj.inverse.data) { var operation = {}; operation[obj.inverse.command] = self._unpackageForUpdate(obj.inverse.data); // console.log(operation); try { tx.collectionIndex[obj.collection].update({_id: obj._id}, operation); self.log('Rolled back update'); } catch (err) { self.log(err); error = true; } } } if (obj.action === "insert") { if ((obj.instant || rollbackAllDoneItems) && obj.state === 'done') { var sel = {_id: obj._id}; // This transaction_id check is in case the document has been subsequently edited -- in that case, we don't want it removed from the database completely sel.transaction_id = self._transaction_id; try { tx.collectionIndex[obj.collection].remove(sel); self.log('Rolled back insert'); } catch (err) { self.log(err); error = true; } } } if (!error) { self._changeItemState({ txid: self._transaction_id, index: (items.length - 1) - index, // Because order was reversed for rollback state: "rolledBack" }); } }); if (error) { self.log("Rollback failed -- you'll need to check your database manually for corrupted records."); self.log("Here is a log of the actions that were tried and their inverses:"); self.log("(it was probably one of the inverse actions that caused the problem here)"); self.log(EJSON.stringify(items, null, 2)); } // Server only // Client can't change the transactions collection directly anyway if (Meteor.isServer) { if (tx.removeRolledBackTransactions) { if (rollbackAllDoneItems) { Transactions.remove({_id: this._transaction_id}); } self.log('Incomplete transaction removed: ' + this._transaction_id); } else { if (!Transactions.findOne({_id: this._transaction_id})) { if (this._transaction_id) { var transactionRecord = {_id: this._transaction_id, user_id: tx._userOrNull(), description: this._description, items: items, context: this._context, lastModified: ServerTime.date(), state: "rolledBack"}; Transactions.insert(transactionRecord, function (err, res) { if (err) { self.log('No database record for transaction:', self._transaction_id); } }); } } else { Transactions.update({_id: this._transaction_id}, {$set: {state: "rolledBack"}}); } } } self._cleanReset(); self.log("Rollback reset transaction 
manager to clean state"); } /** * Queue an insert */ Transact.prototype.insert = function (collection, newDoc, opt, callback) { if (this._rollback || (tx.requireUser && !Meteor.userId())) { return; } // We need to pass the options object when we do the actual insert // But also need to identify any callback functions var callback = (_.isFunction(callback)) ? callback : ((typeof opt !== 'undefined') ? ((_.isFunction(opt)) ? opt : ((_.isFunction(opt.callback)) ? opt.callback : undefined)) : undefined); if (opt && _.isObject(opt.tx)) { opt = opt.tx; } opt = (_.isObject(opt)) ? _.omit(opt,'tx') : undefined; // This is in case we're going to pass this options object on to, say, collection2 (tx must be gone or we'll create an infinite loop) newDoc = _.clone(newDoc); // NOTE: "collection" is the collection object itself, not a string if (this._permissionCheckOverridden(opt) || this._permissionCheck("insert", collection, newDoc, {})) { var self = this; this._openAutoTransaction(opt && opt.description || 'add ' + collection._name.slice(0, - 1)); self._setContext((opt && opt.context) || tx.makeContext('insert', collection, newDoc, {})); if ((typeof opt !== 'undefined' && opt.instant)) { // || this._autoTransaction try { var newId = newDoc._id || Random.id(); _.extend(newDoc, {_id: newId, transaction_id: self._transaction_id}); var newId = self._doInsert(collection, newDoc, opt, callback); // Should give the same newId value as the one we passed var item = self._createItem('insert', collection, newId, {doc: newDoc}, true, self._permissionCheckOverridden(opt)); this._recordTransaction(item); self._pushToRecord("insert", collection, newId, {doc: newDoc}, true, self._permissionCheckOverridden(opt)); // true is to mark this as an instant change this._closeAutoTransaction(opt, callback, newId); self.log("Executed instant insert"); return newId; } catch(err) { self.log(err); self.log("Rollback initiated by instant insert command"); this._rollback = true; this._rollbackReason = 'insert-error'; } } var newId = newDoc._id || Random.id(); // var newId = self._doInsert(collection,_.extend(newDoc,{transaction_id:self._transaction_id}),opt,callback); self._pushToRecord("insert", collection, newId, {doc: _.extend(newDoc, {_id: newId, transaction_id: self._transaction_id})}, false, self._permissionCheckOverridden(opt)); self.log("Pushed insert command to stack: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')' this._closeAutoTransaction(opt, callback); return newId; } else { this._rollback = true; this._rollbackReason = 'permission-denied'; this.log("Insufficient permissions to insert this document into " + collection._name + ':', newDoc); // Permission to insert not granted return; } } /** * Queue a remove */ Transact.prototype.remove = function (collection, doc, opt, callback) { // Remove any document with a field that has this val // NOTE: "collection" is the collection object itself, not a string if (this._rollback || (tx.requireUser && !Meteor.userId())) { return; } // We need to pass the options object when we do the actual remove // But also need to identify any callback functions var callback = (_.isFunction(callback)) ? callback : ((typeof opt !== 'undefined') ? ((_.isFunction(opt)) ? opt : ((_.isFunction(opt.callback)) ? opt.callback : undefined)) : undefined); if (opt && _.isObject(opt.tx)) { opt = opt.tx; } var _id = (_.isObject(doc)) ? doc._id : doc; var existingDoc = collection.findOne({_id: _id}); // (!_.isObject(doc)) ? 
collection.findOne({_id: doc}) : doc; // , {transform: null} if (!(_id && existingDoc)) { this.log('No document found. Make sure you provide an _id field for a document that exists. You passed: ' + JSON.stringify(doc)); } if (this._permissionCheckOverridden(opt) || this._permissionCheck("remove", collection, existingDoc, {})) { var self = this; this._openAutoTransaction(opt && opt.description || 'remove ' + collection._name.slice(0, - 1)); var sel = {_id: _id}; if (Meteor.isServer) { sel.deleted = {$exists: false}; // Can only do removes on client using a simple _id selector } self._setContext((opt && opt.context) || tx.makeContext('remove', collection, existingDoc, {})); if (opt && opt.instant) { try { self._doRemove(collection, _id, sel, true, opt, callback); self.log("Executed instant remove"); } catch(err) { self.log(err); self.log("Rollback initiated by instant remove command"); this._rollback = true; this._rollbackReason = 'remove-error'; } } else { self._doRemove(collection, _id, sel, false, opt, callback); self.log("Pushed remove command to stack: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')' } this._closeAutoTransaction(opt, callback); return !this._rollback; // Remove was executed or queued for execution } else { this._rollback = true; this._rollbackReason = 'permission-denied'; this.log("Insufficient permissions to remove this document from " + collection._name + ':', existingDoc); // Permission to remove not granted return; } } /** * Queue an update */ Transact.prototype.update = function (collection, doc, updates, opt, callback) { // NOTE: "field" should be of the form {$set:{field:value}}, etc. // NOTE: "collection" is the collection object itself, not a string if (this._rollback || (tx.requireUser && !Meteor.userId())) { return; } // We need to pass the options object when we do the actual update // But also need to identify any callback functions var callback = (_.isFunction(callback)) ? callback : ((typeof opt !== 'undefined') ? ((_.isFunction(opt)) ? opt : ((_.isFunction(opt.callback)) ? opt.callback : undefined)) : undefined); if (opt && _.isObject(opt.tx)) { opt = opt.tx; } var opt = (_.isObject(opt)) ? _.omit(opt,'tx') : undefined; var self = this; var _id = (_.isObject(doc)) ? doc._id : doc; var existingDoc = collection.findOne({_id: _id}); // , {transform: null} // var existingDoc = (!_.isObject(doc)) ? collection.findOne({_id:_id}) : doc; // the above is slightly more efficient, in that it doesn't hit the database again // but potential buggy behaviour if a partial doc is passed and the field being updated // isn't in it and it's a $set command and so the inverse is wrongly taken to be $unset if (!(_id && existingDoc)) { self.log('No document found. Make sure you provide an _id field for a document that exists. 
You passed: ' + JSON.stringify(doc)); } if (this._permissionCheckOverridden(opt) || this._permissionCheck("update", collection, existingDoc, updates)) { this._openAutoTransaction(opt && opt.description || 'update ' + collection._name.slice(0, - 1)); var actionFields = _.pairs(updates); // console.log('actionFields:', actionFields); // If the command is $set, the update map needs to be split into individual fields in case some didn't exist before and some did // in which case different fields will require different inverse operations actionFields = _.reduce(actionFields, function (memo, action) { var command = action[0]; // console.log("the command:", command); var updateMap = action[1]; // console.log("the map:", updateMap); if (command === '$set') { actions = _.map(updateMap, function (val, key) { var obj = {}; obj[key] = val; return obj; }); // console.log("the actions:", actions); } else { actions = [updateMap]; } _.each(actions, function (update) { memo.push([command, update]); }); return memo; }, []); // console.log("actionFields:", actionFields); var actionFieldsCount = actionFields.length; for (var i = 0; i < actionFieldsCount; i++) { var command = actionFields[i][0]; // console.log("command:", command); var updateMap = actionFields[i][1]; // console.log("updateMap:", updateMap, EJSON.stringify(updateMap)); var inverse; if (typeof opt === 'undefined' || typeof opt.inverse === 'undefined') { // var fieldName = _.keys(actionField[0][1])[0]; // console.log(fieldName); if (typeof opt === 'undefined') { opt = {}; } // console.log("inverse called with (collection, existingDoc, updateMap, opt):", collection, existingDoc, updateMap, opt); inverse = _.isFunction(tx.inverseOperations[command]) && tx.inverseOperations[command].call(self, collection, existingDoc, updateMap, opt) || tx._inverseUsingSet(collection, existingDoc, updateMap, opt); // console.log("inverse:", inverse); } else { // This "opt.inverse" thing is only used if you need to define some tricky inverse operation, but will probably not be necessary in practice // a custom value of opt.inverse needs to be an object of the form: // {command: "$set", data: {fieldName: value}} inverse = opt.inverse; } // console.log("inverse:", inverse); self._setContext((opt && opt.context) || tx.makeContext('update', collection, existingDoc, updates)); var updateData = {command: command, data: updateMap}; if (opt && opt.instant) { try { self._doUpdate(collection, _id, updates, updateData, inverse, true, opt, callback, (i === (actionFieldsCount - 1)) ? true : false); self.log("Executed instant update"); // true param is to record this as an instant change } catch(err) { self.log(err); self.log("Rollback initiated by instant update command"); this._rollback = true; this._rollbackReason = 'update-error'; } } else { (function (updateData, inverse, execute) { // console.log('updateData, inverse, execute:', updateData, inverse, execute); self._doUpdate(collection, _id, updates, updateData, inverse, false, opt, callback, execute); self.log("Pushed update command to stack: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')' }).call(this, updateData, inverse, (i === (actionFieldsCount - 1)) ? 
true : false); } } this._closeAutoTransaction(opt, callback); return !this._rollback; // Update was executed or queued for execution } else { this._rollback = true; this._rollbackReason = 'permission-denied'; self.log("Insufficient permissions to update this document in " + collection._name + ':', existingDoc); // Permission to update not granted return; } } /** * Cancels a transaction, but doesn't roll back immediately * When the transaction is committed, no queued actions will be executed * and any instant updates, inserts or removes that were made will be rolled back */ Transact.prototype.cancel = function () { this.log('Transaction cancelled'); this._rollback = true; this._rollbackReason = 'transaction-cancelled'; } /** * Undo the last transaction by the user */ Transact.prototype.undo = function (txid, callback) { var self = this; var callback = (_.isFunction(txid)) ? txid : callback; Meteor.call("_meteorTransactionsUndo", (_.isString(txid)) ? txid : null, function (err, res) { if (Meteor.isClient && res && _.isFunction(tx.onTransactionExpired)) { tx.onTransactionExpired.call(self, err, res); } if (_.isFunction(callback)) { callback.call(self, err, !res); } }); } /** * Redo the last transaction undone by the user */ Transact.prototype.redo = function (txid, callback) { var self = this; var callback = (_.isFunction(txid)) ? txid : callback; Meteor.call("_meteorTransactionsRedo", (_.isString(txid)) ? txid : null, function (err, res) { if (Meteor.isClient && res && _.isFunction(tx.onTransactionExpired)) { tx.onTransactionExpired.call(); } if (_.isFunction(callback)) { callback.call(self, err, !res); } }); } /** * Manually add context to current transaction using _.extend (equivalent of lodash.assign) */ Transact.prototype.setContext = function (context) { this._setContext(context); } /** * Manually add context to current transaction using lodash.merge */ Transact.prototype.mergeContext = function (context) { tx.lodash.merge(this._context, context); } /** * Manually add context to current transaction using lodash.set */ Transact.prototype.setContextPathValue = function (path, value) { tx.lodash.set(this._context, path, value); } /** * Fetch context of the current transaction */ Transact.prototype.getContext = function (context) { return this._context; } // ********************************************************** // INTERNAL METHODS - NOT INTENDED TO BE CALLED FROM APP CODE // ********************************************************** Transact.prototype.log = function (message) { tx.log(((tx.showConnection) ? 
'[Connection: ' + this._connectionId + '] ' : '') + message); } Transact.prototype._doInsert = function (collection, newDoc, opt, callback) { // The following is a very sketchy attempt to support collection2 options // Requires aldeed:collection2 to be before babrahams:transactions in .packages // which we do through a weak dependency on aldeed:collection2 if (this._Collection2Support(collection, opt)) { // This is a brutal workaround to allow collection2 options to do their work var newId = null; var error = null; var returnedId = collection.insert(newDoc, opt, function (err, newId) { if (!err) { newId = newId; } else { error = err; } if (_.isFunction(callback)) { // Let the app handle the error via its own callback callback(error, newId); } }); if (returnedId) { return returnedId; } else { throw new Meteor.Error('Insert failed: reason unknown.', ''); } } else { return collection.insert(newDoc, callback); } } Transact.prototype._doRemove = function (collection, _id, sel, instant, opt, callback) { if (!_.isFunction(callback)) { callback = undefined; } var self = this; if ((opt && typeof opt.softDelete !== 'undefined' && opt.softDelete) || (opt && typeof opt.softDelete === 'undefined' && tx.softDelete) || (typeof opt === 'undefined' && tx.softDelete)) { self._pushToRecord("remove", collection, _id, null, instant, self._permissionCheckOverridden(opt)); if (instant) { var item = self._createItem("remove", collection, _id, null, instant, self._permissionCheckOverridden(opt)); self._recordTransaction(item); collection.update(sel, {$set: {deleted: ServerTime.date(), transaction_id: self._transaction_id}}, callback); } return; } var fields = {hardDelete: true}; // Hard delete document if (instant) { var fullDoc = collection.findOne(sel); // , {transform: null} fields.doc = fullDoc; if (!fullDoc) { // There is no existing document to remove // Return without creating an item in the queue or recording it self.log("Document not found for removal:", sel); } else { fields.doc.transaction_id = self._transaction_id; fields.doc.deleted = ServerTime.date(); } var item = self._createItem("remove", collection, _id, fields, instant, self._permissionCheckOverridden(opt)); self._recordTransaction(item); collection.remove(sel, callback); } self._pushToRecord("remove", collection ,_id, fields, instant, self._permissionCheckOverridden(opt)); // null is for field data (only used for updates) and true is to mark this as an instant change } Transact.prototype._doUpdate = function (collection, _id, updates, updateData, inverseData, instant, opt, callback, execute) { // console.log("collection, _id, updates, updateData, inverseData, instant, opt, callback, execute", collection, _id, updates, updateData, inverseData, instant, opt, callback, execute); var self = this; if (instant) { if (execute) { if (!_.isFunction(callback)) { callback = undefined; } if (_.isObject(updates["$set"])) { _.extend(updates["$set"], {transaction_id: self._transaction_id}); } else { updates["$set"] = {transaction_id: self._transaction_id}; } // This error, handler business is to allow collection2 `filter:false` to do its work var error = null; var handler = function (err, res) { if (err) { error = err; } if (_.isFunction(callback)) { callback(err, res); } } var item = self._createItem("update", collection, _id, {update: self._packageForStorage(updateData), inverse: self._packageForStorage(inverseData)}, instant,self._permissionCheckOverridden(opt)); self._recordTransaction(item); // No need to check for aldeed:collection2 support (like we do 
for inserts) // as we can pass an options hash to an update if (_.isObject(opt)) { collection.update({_id:_id}, updates, opt, handler); } else { collection.update({_id:_id}, updates, handler); } if (error) { throw new Meteor.Error('Update failed: ' + error.message, error.reason); return; } delete updates["$set"].transaction_id; } } // console.log(JSON.stringify({update: self._packageForStorage(updateData)}), JSON.stringify({inverse: self._packageForStorage(inverseData)})); self._pushToRecord("update", collection, _id, {update: self._packageForStorage(updateData), inverse: self._packageForStorage(inverseData)}, instant, self._permissionCheckOverridden(opt)); } // This is used only if an {instant: true} parameter is passed Transact.prototype._recordTransaction = function (item) { if (!Transactions.findOne({_id: this._transaction_id})) { // We need to get a real transaction in the database for recoverability purposes var user_id = tx._userOrNull(); Transactions.insert({ _id: this._transaction_id, user_id: user_id, lastModified: ServerTime.date(), description: this._description, context: this._context, state: "pending" }); } Transactions.update({_id: this._transaction_id}, {$addToSet: {items: item}}); } // This is used to check that the document going into the transactions collection has all the necessary fields Transact.prototype._validateModifier = function (modifier, txid) { var fields = modifier && modifier["$addToSet"]; if (!fields) { return null; } return this._checkTransactionFields([fields.items], txid); } Transact.prototype._checkTransactionFields = function (items, txid, cannotOverridePermissionCheck) { // Iterate over all the items that are going to be stored on the transaction stack and check their legitimacy if (!items || !items.length) { return false; } var self = this, recombinedUpdateFields = {}, recombinedInverseFields = {}; var action, collection, doc, details, inverseDetails, fail = false, previouslyNonExistingDoc = {}; _.each(items, function (value) { if (!fail) { collection = value.collection; // Watch out for undo method validation of a remove, where the doc has been hard removed from the collection // Allowing a doc through that was passed from the client is a potential security hole here, but without soft delete, there is no actual record of that doc // So we check that the removed doc's transaction_id value matches the txid if (value.action === 'remove' && value.doc && value.doc.transaction_id === txid) { doc = value.doc; } else if (value.action === 'insert' && value.doc && value.doc.transaction_id === txid) { // Handle redo of an insert (after a previous undo) doc = value.doc; previouslyNonExistingDoc[doc._id] = doc; } else { doc = previouslyNonExistingDoc[value._id] || tx.collectionIndex[collection].findOne({_id: value._id}); } if (Meteor.isClient && !doc) { // Because this runs in a client simulation, // where a removed document may not be in the collection // or we simply may not be subscribed to that doc anymore // we need to consider and watch out for this // we'll extend the benefit of the doubt and let the server handle the real check return; } if (value.action === 'update') { action = 'update'; details = value.update; inverseDetails = value.inverse; recombinedUpdateFields[details.command] = self._unpackageForUpdate(details.data); recombinedInverseFields[inverseDetails.command] = self._unpackageForUpdate(inverseDetails.data); if (!(value.noCheck && !cannotOverridePermissionCheck)) { // Transactions that have been allowed using overridePermissionCheck are 
considered here, using the noCheck flag // Otherwise the user won't be able to undo them try { fail = !(self._permissionCheck(action, tx.collectionIndex[collection], doc, recombinedUpdateFields) && self._permissionCheck(action, tx.collectionIndex[collection], doc, recombinedInverseFields)); } catch (err) { fail = true; } } return; } else if (value.action === 'insert') { action = 'insert'; } else if (value.action === 'remove' ) { action = 'remove'; } if (!(value.noCheck && !cannotOverridePermissionCheck)) { try { fail = !self._permissionCheck(action, tx.collectionIndex[collection], doc, {}); } catch (err) { // If this transaction was made possible by overridePermissionCheck // It may not be undo/redo-able, unless we follow the same rules (i.e. abide by the noCheck flag) fail = true; } } } }); return !fail; } // Reset everything to a clean state Transact.prototype._cleanReset = function () { this._transaction_id = null; this._autoTransaction = false; this._items = []; this._startAttempts = 0; this._granted = {}; this._rollback = false; this._rollbackReason = ''; this._context = {}; this._description = ''; // Note: we don't reset this._lastTransactionData because we want it to be available AFTER the commit Meteor.clearTimeout(this._autoCancel); } Transact.prototype._callback = function (a, b, err, res) { var c = (_.isFunction(a)) ? a : ((_.isFunction(b)) ? b : null); if (c) { c.call(this._lastTransactionData, err, res); } } // Starts a transaction automatically if one isn't started already Transact.prototype._openAutoTransaction = function (description) {// console.log("Auto open check value for transaction_id: " + this._transaction_id + ' (Auto: ' + this._autoTransaction + ')'); if (!this._transaction_id) { this._autoTransaction = true; this._description = description; this.start(description); // console.log("Auto opened: " + this._transaction_id + ' (Auto: ' + this._autoTransaction + ')'); } } // Commits a transaction automatically if it was started automatically Transact.prototype._closeAutoTransaction = function (opt, callback, newId) {// console.log("Auto commit check value for autoTransaction: " + this._autoTransaction + ' (Auto: ' + this._autoTransaction + ')'); if (this._autoTransaction) { this.log("Auto committed: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')'; this.commit(opt, undefined, newId); } } // Cancels and commits a transaction automatically if it exceeds the idleTimeout threshold with no new actions Transact.prototype._resetAutoCancel = function (isStartOnClient) { if (Meteor.isServer || isStartOnClient) { var self = this; if (Meteor.isServer) { Meteor.clearTimeout(this._autoCancel); } else { clearTimeout(this._autoCancel); } // It would be nice to use Meteor.clearTimeout and Meteor.setTimeout // but we need to sidestep the rule that timers can't be // started in simulations (which is what Meteor.setTimeout checks for) var onTimeOut = function () { self.log('Transaction (' + self._transaction_id + ') was cancelled after being inactive for ' + (tx.idleTimeout / 1000) + ' seconds.'); self.rollback(); }; this._autoCancel = (Meteor.isServer) ? 
Meteor.setTimeout(onTimeOut, tx.idleTimeout) : setTimeout(onTimeOut, tx.idleTimeout); } } // Pushes the record of a single action to the "items" sub document that is going to be recorded in the transactions collection along with data about this transaction Transact.prototype._pushToRecord = function (type, collection, _id, fieldData, instant, permissionCheckOverridden) { var item = this._createItem(type, collection, _id, fieldData, instant, permissionCheckOverridden); this._items.push(item); this._resetAutoCancel(); } // Create item for queue Transact.prototype._createItem = function (type, collection, _id, fieldData, instant, permissionCheckOverridden) { var item = {collection:collection._name, _id:_id, action:type, state: "pending"}; if (typeof instant !== 'undefined' && instant) { item.instant = true; item.state = "done"; } if (typeof permissionCheckOverridden !== 'undefined' && permissionCheckOverridden) { item.noCheck = true; } if (typeof fieldData !== "undefined" && fieldData) { _.extend(item, fieldData); } return item; } // Checks whether the permission check should be waived Transact.prototype._permissionCheckOverridden = function (opt) { return typeof opt !== 'undefined' && opt.overridePermissionCheck; } // Uses a user-defined permission check as to whether this action is allowed or not Transact.prototype._permissionCheck = function (action, collection, doc, updates) { // insert and remove send null for "updates" param, but this is where all the details of any update are found return tx.checkPermission(action, collection, doc, updates); } // Builds the context object Transact.prototype._setContext = function (context) { _.extend(this._context, context); } // This turns the data that has been stored in an array of key-value pairs into an object that mongo can use in an update Transact.prototype._unpackageForUpdate = function (data) { var objForUpdate = {}; _.each(data, function (val) { var unpackagedValue; if (val.json) { unpackagedValue = EJSON.parse(val.json); } else { unpackagedValue = val.value; } objForUpdate[val.key] = unpackagedValue; }); return objForUpdate; } // This turns the data that is given as a mongo update into an array of key-value pairs that can be stored Transact.prototype._packageForStorage = function (update) { var arrForStorage = []; _.each(update.data, function (value, key) { var packagedValue = {}; if ((_.isObject(value) || _.isArray(value)) && !_.isDate(value)) { // a date is considered an object by _.isObject (because it is, I suppose) packagedValue.json = EJSON.stringify(value); } else { packagedValue.value = value; } arrForStorage.push(_.extend({key: key}, packagedValue)); }); return {command: update.command, data: arrForStorage}; } Transact.prototype._Collection2Support = function (collection, opt) { // The following is a very sketchy attempt to support collection2 options // Requires aldeed:collection2 to be before babrahams:transactions in .packages // which we do through a weak dependency on aldeed:collection2 return _.isFunction(collection.simpleSchema) && collection.simpleSchema() !== null && _.find([ "validationContext", "validate", "filter", "autoConvert", "removeEmptyStrings", "getAutoValues", "replace", "upsert", "extendAutoValueContext", "trimStrings", "extendedCustomContext", "transform" ], function (c2option) { return typeof opt[c2option] !== "undefined"; } ); } Transact.prototype._changeItemState = function (data) { // Need to make a write to the transaction record, marking this action as `done` var m = {}; m["items." 
+ data.index + ".state"] = data.state; Transactions.update({_id: data.txid}, {$set: m}); } Transact.prototype._userOrNull = function () { var userId = null; // Need the try-catch because we don't know if this is getting called from a method or from plain server code // If from plain server code, it will throw an error try { userId = _.isFunction(Meteor.userId) && Meteor.userId(); } catch (err) { return null; } return userId; } /** * Actually execute the transaction - i.e. make the db writes */ Transact.prototype._processTransaction = function (txid, description, items, context, cannotOverridePermissionCheck) { check(txid,String); check(description,String); check(items, Array); check(context, Object); check(cannotOverridePermissionCheck, Boolean); if (items && items.length && !tx._checkTransactionFields(items, txid, cannotOverridePermissionCheck)) { throw new Meteor.Error('Transaction not allowed'); // TODO -- we need a bit of a better error message than this! return; } // Here is where we need to execute the 2-phase commit // We begin by setting the transaction document with all write info to a state of pending var existingTransaction = Transactions.findOne({_id: txid}); if (existingTransaction) { // throw new Meteor.Error('Transaction with duplicate _id found'); // return; // This is here because we have some {instant: true} calls // Overwrite the items field with the full complement of items in the queue // Also overwrite context with context from all item calls, // and any manual tx.setContext calls Transactions.update({_id: txid}, {$set: {items: items, context: context, description: description, lastModified: ServerTime.date()}}); } // First, need to iterate over the changes that are going to be made and make sure that, // if there are hard removes, the db version of the doc gets stored on the transaction _.each(items, function (item, index) { if (item.action === "remove" && item.hardDelete) { // Get the existing doc and store it in the transaction record // We overwrite the temporary version of the doc from an instant remove on the client // Because chances are that the whole document was not available on the client var Collection = tx.collectionIndex[item.collection]; var doc = Collection.findOne({_id: item._id}); // , {transform: null} items[index].doc = doc; } }); // STEP 1 - Set initial state of transaction to "pending" if (!existingTransaction && !Transactions.insert({_id: txid, user_id: tx._userOrNull(), description: description, items: items, context: context, lastModified: ServerTime.date(), state: "pending"})) { throw new Meteor.Error('Unable to commit transaction'); return; } // STEP 2 - Make changes specified by items in the queue var success = true; var self = this; var logErrors = function (err) { self.log(err.toString()); }; var updateCache = {}; var cacheValues = function (item, index) { _.each(item.update.data, function (keyValuePair, i) { if (_.isUndefined(updateCache[item.collection])) { updateCache[item.collection] = {}; } if (_.isUndefined(updateCache[item.collection][item._id])) { updateCache[item.collection][item._id] = {}; } // If there's an item in the update cache, we need to overwrite the transaction record now // Because we know it probably has the wrong inverse value if (!_.isUndefined(updateCache[item.collection][item._id][keyValuePair.key])) { var mod = tx._unpackageForUpdate([{ key: "items." + index + ".inverse.data." 
+ i + ".value", value: updateCache[item.collection][item._id][keyValuePair.key] }]); var update = {$set: mod}; if (cannotOverridePermissionCheck) { update["$unset"] = {noCheck: 1}; } Transactions.update({_id: txid}, update); } updateCache[item.collection][item._id][keyValuePair.key] = keyValuePair.value; }); } var newIdValues = {}; _.each(items, function (item, index) { if (success) { try { if (item.instant) { // Already done -- don't do it again if (item.action === 'update') { // Cache values cacheValues(item, index); } return; } var Collection = tx.collectionIndex[item.collection]; var txData = {transaction_id: txid}; switch (item.action) { case 'insert' : // Will run synchronously. If this fails an exception will be thrown var newId = Collection.insert(_.extend(item.doc, {_id: item._id}, txData)); // The insert succeeded items[index].state = 'done'; self.log("Executed insert"); break; case 'update' : var modifier = {}; var data = tx._unpackageForUpdate(item.update.data); modifier[item.update.command] = data; if (modifier["$set"]) { // Add to the $set modifier modifier["$set"] = _.extend(modifier["$set"], txData); } else { // Add a $set modifier modifier["$set"] = txData; } // Will run synchronously. If this fails an exception will be thrown Collection.update({_id: item._id}, modifier); // The update succeeded // Cache values if (item.update.command === "$set") { cacheValues(item, index); } items[index].state = 'done'; self.log("Executed update"); break; case 'remove' : if (item.hardDelete) { // Remove the whole document // Will run synchronously. If this fails an exception will be thrown var removed = Collection.remove({_id: item._id}); // The remove succeeded items[index].state = 'done'; self.log('Executed remove'); } else { // Just do a soft delete // Will run synchronously. 
If this fails an exception will be thrown Collection.update({_id: item._id}, {$set: _.extend(txData, {deleted: ServerTime.date()})}); // The remove succeeded items[index].state = 'done'; self.log('Executed remove'); } break; default : // Do nothing } } catch (err) { success = false; logErrors(err); } } if (success) { tx._changeItemState({ txid: txid, index: index, state: 'done' }); } }); // STEP 3 - Set state to "done" if (success) { var self = this; Transactions.update({_id: txid}, {$set: {state: "done", lastModified: ServerTime.date()}}, function (err, res) { if (err) { self.log('Could not complete transaction:', txid, err); success = false; } }); if (success) { var finalTxRecord = Transactions.findOne({_id: txid}); return {items: finalTxRecord.items}; } } else { self.log('Transaction failed'); // Need to run the items through a rollback with actual inverse writes var rollbackAllDoneItems = true; tx._transaction_id = txid; tx._items = items; tx._description = description; tx._context = context; tx.rollback.call(tx, rollbackAllDoneItems); } } // ******* // Methods // ******* // These are the methods that actually do the commits and undo and redo work // They would usually not be called directly -- but invoked using tx.undo() and tx.redo() // Although these methods are pretty large, we're including them on both client and server // because we want to maintain latency compensation on the client Meteor.methods({ '_meteorTransactionsProcess' : function (txid, description, items, context) { check(txid,String); check(description,String); check(items, Array); check(context, Object); var cannotOverridePermissionCheck = true; return tx._processTransaction(txid, description, items, context, cannotOverridePermissionCheck); }, '_meteorTransactionsUndo' : function (txid) { check(txid,Match.OneOf(String,null,undefined)); if (tx.requireUser && !Meteor.userId()) { console.log('You must be logged in to undo actions.'); return; } // Get the latest transaction done by this user and undo it var expired = false; var queuedItems = []; var selector = (txid) ? { _id: txid} : {user_id: tx._userOrNull()}; var sorter = (txid) ? undefined : {sort: {lastModified: -1}, limit:1}; var lastTransaction = Transactions.find(_.extend(selector, {$or: [{undone: null}, {undone: {$exists: false}}], expired: {$exists: false}, state: "done"}), sorter).fetch()[0]; if (lastTransaction && typeof lastTransaction.items !== 'undefined') { // Check that user still has permission to edit all these items // Undo in reverse order // e.g. 
Need to undo removes first, so that docs are available for undo updates if docs were updated before removal if (tx._checkTransactionFields(lastTransaction.items, lastTransaction._id)) { _.each(lastTransaction.items.reverse(), function (obj, index) { if (obj.action === 'remove') { if (!expired) { if (obj.doc) { // This doc is here because the original was removed // First check for duplicates -- if there is one, the transaction has expired if (tx.collectionIndex[obj.collection].find(obj.doc._id).count()) { expired = true; } else { queuedItems.push(function () { tx.collectionIndex[obj.collection].insert(obj.doc); }); } } else { // This was removed with softDelete queuedItems.push(function () { tx.collectionIndex[obj.collection].update({_id: obj._id}, {$unset: {deleted: 1, transaction_id: lastTransaction._id}}); }); } } } if (obj.action === 'update') { if (!expired) { if (typeof obj.inverse !== 'undefined' && obj.inverse.command && obj.inverse.data) { var operation = {}; operation[obj.inverse.command] = tx._unpackageForUpdate(obj.inverse.data); // console.log('inverse operation:'+EJSON.stringify(operation)); queuedItems.push(function () { tx.collectionIndex[obj.collection].update({_id: obj._id}, operation); /* console.log("operation called:"+EJSON.stringify(operation)); */ }); } } } if (obj.action === 'insert') { if (!expired) { var sel = {_id: obj._id}; // This transaction check is in case the document has been subsequently edited -- in that case, we don't want it removed from the database completely // Instead, we remove this transaction from the visible list by setting expired to true sel.transaction_id = lastTransaction._id; queuedItems.push(function () { tx.collectionIndex[obj.collection].remove(sel); } ); if (tx.collectionIndex[obj.collection].findOne({_id: obj._id, $and: [{transaction_id: {$exists: true}}, {transaction_id: {$ne: lastTransaction._id}}]})) { // Transaction has expired expired = true; // This is to tell the client that the transaction has expired and the undo was not executed } } } }); if (!expired) { // Process queue _.each(queuedItems,function (queuedItem, index) { var fail = false; try { queuedItem.call(); } catch (err) { fail = true; } if (!fail) { tx._changeItemState({ txid: lastTransaction._id, index: (queuedItems.length - 1) - index, // Because array has been reversed for undo state: 'undone' }); } }); // After an undo, we need to update transaction document Transactions.update({_id: lastTransaction._id}, {$set: {undone: ServerTime.date(), state: 'undone'}}); } } else { // Non-empty transaction, but user has lost the permission to edit at least one of the items encompassed by the transaction expired = true; } if (expired) { // Flag this as expired in the db to keep it out of the user's undo/redo stack Transactions.update({_id: lastTransaction._id}, {$set: {expired: true}}); } } else if (lastTransaction) { // Auto clean - this transaction is empty Transactions.remove({_id: lastTransaction._id}); } return expired; // If the function returns true, the undo failed }, '_meteorTransactionsRedo' : function (txid) { check(txid,Match.OneOf(String, null, undefined)); if (tx.requireUser && !Meteor.userId()) { console.log('You must be logged in to redo actions.'); return; } // Get the latest undone transaction by this user and redo it var expired = false; var queuedItems = []; var selector = (txid) ? {_id: txid} : {user_id: tx._userOrNull()}; var sorter = (txid) ? 
undefined : {sort: {undone: -1}, limit: 1}; var lastUndo = Transactions.find(_.extend(selector, {undone: {$exists: true, $ne: null}, expired: {$exists: false}}), sorter).fetch()[0]; if (lastUndo && typeof lastUndo.items !== 'undefined') { // Check that user still has permission to edit all these items if (tx._checkTransactionFields(lastUndo.items, lastUndo._id)) { _.each(lastUndo.items, function (obj, index) { if (obj.action === "remove") { if (obj.doc) { // This document was removed using a hard delete the first time // We'll hard delete again, making no attempt to save any modifications that have happened to the document in the interim queuedItems.push(function () { tx.collectionIndex[obj.collection].remove({_id: obj._id})}); } else { queuedItems.push(function () { tx.collectionIndex[obj.collection].update({_id: obj._id}, {$set: {deleted: ServerTime.date(), transaction_id: lastUndo._id}})}); } } if (obj.action === "update") { if (typeof obj.update !== 'undefined' && obj.update.command && obj.update.data) { var operation = {}; operation[obj.update.command] = tx._unpackageForUpdate(obj.update.data);// console.log(operation); queuedItems.push(function () { tx.collectionIndex[obj.collection].update({_id: obj._id}, operation); }); } } if (obj.action === "insert") { if (!expired) { if (!tx.collectionIndex[obj.collection].find({_id: obj._id}).count()) { var newDoc = _.extend(obj.doc, {transaction_id: lastUndo._id,_id: obj._id}); queuedItems.push(function () { tx.collectionIndex[obj.collection].insert(newDoc) }); } else { // This is an edited doc that was not removed on last undo // Transaction has expired expired = true; // This is to tell the client that the transaction has expired and the redo was not executed } } } }); if (!expired) { // Process queue _.each(queuedItems, function (queuedItem, index) { var fail = false; try { queuedItem.call(); } catch (err) { fail = true; } if (!fail) { tx._changeItemState({ txid: lastUndo._id, index: index, state: 'done' }); } }); // After a redo, we need to update the transaction document Transactions.update({_id: lastUndo._id}, {$unset: {undone: 1}, $set: {state: 'done'}}); // ,$set:{lastModified: ServerTime.date()} -- LEADS TO UNEXPECTED RESULTS } } else { // User no longer has permission to edit one of the items in this transaction expired = true; } if (expired) { // Flag this transaction as expired to keep it out of the user's undo-redo stack Transactions.update({_id: lastUndo._id}, {$set: {expired: true}}); } } return expired; // If the function returns true, the redo failed } }); // Wrap DB write operation methods // Wrapping technique shamelessly stolen from aldeed:collection2 codebase // (https://github.com/aldeed/meteor-collection2/blob/master/collection2.js) and modified for this package // backwards compatibility if (typeof Mongo === "undefined") { Mongo = {}; Mongo.Collection = Meteor.Collection; } _.each(['insert', 'update', 'remove'], function (methodName) { var _super = Mongo.Collection.prototype[methodName]; Mongo.Collection.prototype[methodName] = function () { var self = this, args = _.toArray(arguments); // self is the Mongo.Collection instance var optionsArg = (methodName === 'update') ? 2 : 1; if (_.isObject(args[optionsArg]) && args[optionsArg].tx) { args.unshift(self); return tx[methodName].apply(tx, args); } return _super.apply(self, args); }; }); // Here we ensure the the tx object is aware of the apps collections and can access them by name // we use dburles:mongo-collection-instances package to do this. 
// We also check for the presence of SimpleSchema and extend the schema of existing // collections to allow for the fields that transactions will add to documents Meteor.startup(function () { Meteor.defer(function () { // Auto detect collections tx.collectionIndex = (_.isEmpty(tx.collectionIndex)) ? _.reduce(Mongo.Collection.getAll(), function (memo, coll) { memo[coll.name] = coll.instance; return memo; }, {}) : tx.collectionIndex; // Built in support for simple-schema/collection2 if (typeof SimpleSchema !== 'undefined') { _.each(tx.collectionIndex, function (collection) { if (_.isFunction(collection.simpleSchema) && collection.simpleSchema() !== null && collection._c2) { collection.attachSchema({deleted: {type: Date, label: "Deleted", optional: true}, transaction_id: {type:String, label: "transaction_id", optional: true}, _id: {type: String, label: "_id", optional: true}}); } }); if (_.isFunction(tx.Transactions.attachSchema)) { var userPattern = { type:String, label:"User Id" } if (!tx.requireUser) { userPattern.optional = true; } var TransactionSchema = new SimpleSchema({ "context": { type:Object, label:"Context", blackbox:true, optional:true }, "description": { type:String, label:"Description" }, "items": { type:[Object], label:"Items", blackbox:true, optional:true }, "lastModified": { type:Date, label:"Timestamp" }, "undone": { type:Date, label:"Undone", optional:true }, "user_id": userPattern, "expired": { type:Boolean, label:"Expired", optional:true }, "state": { type:String, label:"state" } }); tx.Transactions.attachSchema(TransactionSchema); } } }); });
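// *****************************************************************************
// USAGE SKETCH (ILLUSTRATIVE ONLY -- NOT PART OF THE PACKAGE)
// One possible way an app might drive the public API defined above. `Posts`,
// `postId`, the field names and the permission rule are hypothetical; only
// `tx.start`, `tx.commit`, `tx.undo`, `tx.redo` and the `{tx: ...}` /
// `{instant: true}` options come from the source in this file.
//
// // Optional: route every transactional write through an app-defined check
// tx.checkPermission = function (action, collection, doc, updates) {
//   return true; // e.g. inspect `action`, `doc` and `updates` here
// };
//
// tx.start('edit post');
//
// // The wrapped Mongo.Collection methods hand off to the transaction manager
// // whenever a `tx` option is present, queueing the write instead of running it
// Posts.update({_id: postId}, {$set: {title: 'New title'}}, {tx: true});
// Posts.insert({title: 'Draft', owner: Meteor.userId()}, {tx: true});
//
// tx.commit(function (err, newIds) {
//   // On success, `newIds` maps collection names to the _ids of queued inserts
//   if (!err) { console.log('committed', newIds); }
// });
//
// // A write can also be made immediately and still be recorded for rollback:
// // Posts.update({_id: postId}, {$set: {title: 'Right now'}}, {tx: {instant: true}});
//
// // The most recent transaction by the current user can later be reversed
// // and re-applied:
// tx.undo();
// tx.redo();
// *****************************************************************************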
lib/transactions-common.js
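// *****************************************************************************
// CONFIGURATION SKETCH (ILLUSTRATIVE ONLY)
// Typical startup-time overrides of the tunable settings documented in the
// comments of the source that follows. The collections `Posts` and `Comments`
// and the context value are hypothetical; the setting names themselves
// (`collectionIndex`, `logging`, `showConnection`, `softDelete`, `idleTimeout`,
// `requireUser`, `lodash`, `makeContext`) are the ones read by the package code.
//
// Meteor.startup(function () {
//   Meteor.defer(function () {
//     tx.collectionIndex = {posts: Posts, comments: Comments};
//   });
// });
//
// tx.logging = true;         // console logging of transaction activity
// tx.showConnection = true;  // prefix log lines with the originating connection id
// tx.softDelete = true;      // flag docs as deleted instead of hard-removing them
// tx.idleTimeout = 10000;    // ms before an idle server transaction is rolled back
// tx.requireUser = true;     // only logged-in users get an undo/redo stack
// tx.lodash = lodash;        // only if a lodash package is installed; enables
//                            // tx.mergeContext / tx.setContextPathValue
//
// tx.makeContext = function (command, collection, doc, modifier) {
//   return {collection: collection._name}; // recorded on the transaction document
// };
// *****************************************************************************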
// ******************************* // Transactions for Meteor + Mongo // v0.8.0 // by Brent Abrahams // [email protected] // MIT Licence 2017 // ******************************* // This package adds one new mongo collection to your app // It is exposed to the app via `tx.Transactions`, not via `Transactions` // In much the same way that we have `Meteor.users` rather than `Users` Transactions = new Mongo.Collection(Meteor.settings && Meteor.settings.transactionsCollection || "transactions"); if (Meteor.isServer) { Transactions.allow({ insert: function (userId, doc) { return (_.has(doc, "items") || doc.user_id !== userId) ? false : true; }, update: function (userId, doc, fields, modifier) { if (userId !== doc.user_id) { // TODO -- this condition will need to be modified to allow an admin to look through transactions and undo/redo them from the client // That said, an admin interface shouldn't really be messing with the transactions collection from the client anyway, so ignoring this for now return false; } else { if (tx._validateModifier(modifier, doc._id)) { return true; } else { // Transactions.remove({_id:doc._id}); return false; } } }, remove: function (userId, doc) { var fullDoc = Transactions.findOne({_id: doc._id}); return fullDoc && fullDoc.user_id === userId; } }); } TransactionManager = function () { // This is instantiated as `tx` and maintains a map that points to the correct instance of `Transact` // It also carries all of the globally configurable parts of the `tx` object, which intances of `Transact` will reference // The per-transaction methods that this manager needs to direct to the correct `Transact` instance are: // tx.start // tx.insert // tx.update // tx.remove // tx.commit // tx.cancel // tx.rollback // ******************************************************* // THIS IS THE MAP FROM CONNECTION TO TRANSACTION INSTANCE // ******************************************************* this._txMap = {}; // This ensures that the map contains a `Transact` instance for each connection and also for the default (when there is no connection) this._instance = function () { var instanceKey = 'default'; var currentInvocation = DDP._CurrentInvocation; if (_.isFunction(currentInvocation.get)) { var ci = currentInvocation.get(); if (ci && ci.connection && ci.connection.id) { instanceKey = ci.connection.id; } } var instance = this._txMap[instanceKey]; if (!instance) { this._txMap[instanceKey] = new Transact(); // Let the instance know its own connection_id this._txMap[instanceKey]._connectionId = instanceKey; instance = this._txMap[instanceKey]; } return instance; } // ********************************************************************************************************** // YOU CAN OPTIONALLY OVERWRITE tx.collectionIndex TO MAKE THE TRANSACTIONS WORK FOR CERTAIN COLLECTIONS ONLY // ********************************************************************************************************** // e.g. in a file shared by client and server, write: // tx.collectionIndex = {posts:Posts,comments:Comments, etc...} // where the key is the name of the database collection (e.g. 'posts') and the value is the actual Meteor Mongo.Collection object (e.g. Posts) // by default, all collections are added to tx.collectionIndex on startup in a "Meteor.startup(function (){ Meteor.defer(function () { ..." block // so if you are planning to overwrite tx.collectionIndex, you'll also need to wrap your code in "Meteor.startup(function (){ Meteor.defer(function () { ..." 
block // there's no real need to do this for most applications though this.collectionIndex = {}; // ******************************************************************** // YOU CAN OPTIONALLY OVERWRITE ANY OF THESE (e.g. tx.logging = false;) // ******************************************************************** // Recommendations about _where_ to overwrite the following attributes/methods are given as [BOTH], [SERVER], or [CLIENT] // Turn logging on or off // [BOTH] this.logging = true; // By default, messages are logged to the console // [BOTH] this.log = function () { if (this.logging) { _.each(arguments, function (message) { console.log(message); }) } }; // Because most/many db writes will come through the transaction manager, this is a good place to do some permission checking // NOTE: this permission check is the only thing standing between a user and the database if a transaction is committed from a method with no surrounding permission checks of its own // NOTE: if you're commiting transactions from the client, you'll definitely need this as writes do not go through your allow and deny rules // [BOTH] this.checkPermission = function (command, collection, doc, modifier) { return true; }; // commands are "insert", "update", "remove" // For the purpose of filtering transactions later, a "context" field is added to each transaction // By default, we don't do anything -- the context is empty, but there are probably certain fields in the document that we could record to use for filtering. // Remember, if there are multiple document being processed by a single transaction, the values from the last document in the queue will overwrite values for fields that have taken a value from a previous document - last write wins // OVERWRITING THIS WITH tx.makeContext = function () { ... } IS STRONGLY RECOMMENDED // [BOTH] this.makeContext = function (command, collection, doc, modifier) { return {}; }; // TODO -- detect routes and add the path to the context automatically (separate, per router, packages) // If requireUser is set to false, any non-logged in user, using the `babrahams:undo-redo` package gets the undo-redo stack of any non-logged-in user // For security purposes this should always be set to true in real apps that use the `babrahams:undo-redo` package // [BOTH] this.requireUser = false; // This function is called on the client when the user tries to undo (or redo) a transaction in which some (or all) documents have been altered by a later transaction // [CLIENT] this.onTransactionExpired = function () { alert('Sorry. 
Other edits have been made, so this action can no longer be reversed.'); }; // If app code forgets to close a transaction on the server, it will autoclose after the following number of milliseconds // If a transaction is open on the client, it just stays open indefinitely // [BOTH] this.idleTimeout = 5000; // If something goes wrong and a transaction isn't closed it remains open (on the client - TODO - make the client time out the same way the server does) // This is also a problem on the server, as a transaction will remain open until the it times out // and meanwhile other transactions can't be started and committed // The following can be set to true to stop this from happening, but it means you have to be strict about coding your logic // so that only one transaction is started at any given time // [BOTH] this.forceCommitBeforeStart = false; // By default, documents are hard deleted and a snapshot of the document the moment before deletion is stored for retrieval in the transaction document // This is much more prone to causing bugs and weirdness in apps, e.g. if a removed doc is restored after documents it depends on have been removed // (and whose removal should normally have caused the restored doc to have also been removed and tagged with the same transaction_id) // It's safer to turn softDelete on for complex apps, but having it off makes this package work out of the box better, as the user doesn't have to // use `,deleted:{$exists:false}` in their `find` and `findOne` selectors to keep deleted docs out of the result // [BOTH] this.softDelete = false; // This flag tells the package how to deal with incomplete transactions, undos and redos on startup // The possible modes are: // `complete` - which will try to complete any pending transactions // `rollback` - which will try to restore db to state prior to the pending transaction // any other value will just leave the db in the state it was left at last shutdown // [SERVER] this.selfRepairMode = 'complete'; // If you want to limit the volume of rubbish in the transactions collection // you can set `tx.removeRolledBackTransactions = true` // It is false by default because having a record in the db helps with debugging // [BOTH] this.removeRolledBackTransactions = false; // Overwrite this with something like `tx.lodash = lodash` if you have a lodash package installed // This gives you access to `tx.mergeContext` and `tx.setContextPathValue` this.lodash = null; // The following function `_inverseUsingSet` is necessary for `inverseOperations` // It is not intended to be a part of the public API // Default inverse operation that uses $set to restore original state of updated fields this._inverseUsingSet = function (collection, existingDoc, updateMap, opt) { var self = this, inverseCommand = '$set', formerValues = {}; _.each(_.keys(updateMap), function (keyName) { // Given a dot delimited string as a key, and an object, find the value var _drillDown = function (obj, key) { return Meteor._get.apply(null, [obj].concat(key.split('.'))); // Previous implementation, which worked fine but was more LOC than necessary /*var pieces = key.split('.'); if (pieces.length > 1) { var newObj = obj ? 
obj[pieces[0]] : {}; pieces.shift(); return this._drillDown(newObj,pieces.join('.')); } else { if (obj) { return obj[key]; } else { return; // undefined } }*/ } var formerVal = _drillDown(existingDoc || {}, keyName); if (typeof formerVal !== 'undefined') { // Restore former value inverseCommand = '$set'; formerValues[keyName] = formerVal; } else { // Field was already unset, so just $unset it again inverseCommand = '$unset'; formerValues[keyName] = 1; } }); return {command: inverseCommand, data: formerValues}; }; // Functions to work out the inverse operation that will reverse a single collection update command. // This default implementation attempts to reverse individual array $set and $addToSet operations with // corresponding $pull operations, but this may not work in some cases, eg: // 1. if a $addToSet operation does nothing because the value is already in the array, the inverse $pull will remove that value from the array, even though // it was there before the $addToSet action // 2. if a $addToSet operation had a $each qualifier (to add multiple values), the default inverse $pull operation will fail because $each is not suported for $pull operations // // You may wish to provide your own implementations that override some of these functions to address these issues if they affect your own application. // For example, store the entire state of the array being mutated by a $set / $addToSet or $pull operation, and then restore it using a inverse $set operation. // The tx.inverse function is called with the tx object as the `this` data context. // [BOTH] this.inverseOperations = { '$set': this._inverseUsingSet, '$addToSet': function (collection, existingDoc, updateMap, opt) { // Inverse of $addToSet is $pull. // TODO -- not really -- if an element is already present, this will pull it // out after an undo, leaving the value changed // This will not work if $addToSet uses modifiers like $each. // In that case, you need to restore state of original array using $set return {command: '$pull', data: updateMap}; }, '$unset': this._inverseUsingSet, '$pull': function (collection, existingDoc, updateMap, opt) { // Inverse of $pull is $addToSet. // TODO -- same problem as $addToSet above, but in reverse return {command: '$addToSet', data: updateMap}; }, '$inc': this._inverseUsingSet, '$push': function (collection, existingDoc, updateMap, opt) { // Inverse of $push is $pull. // This will not work if $push uses modifiers like $each, or if pushed value has duplicates in same array (because $pull will remove all instances of that value). 
// In that case, you need to restore state of original array using $set return {command: '$pull', data: updateMap}; }, }; // ****************************************************************************************************** // THESE ARE METHODS EXTERNAL TO THE PACKAGE THAT ARE CALLED OUTSIDE THE CONTEXT OF A `Transact` INSTANCE // ****************************************************************************************************** this.apiFunctions = [ 'start', 'insert', 'update', 'remove', 'cancel', 'commit', 'rollback', 'purgeIncomplete', 'undo', 'redo' ] // ***************************************************************************************************** // THESE ARE METHODS INTERNAL TO THE PACKAGE BUT ARE CALLED OUTSIDE THE CONTEXT OF A `Transact` INSTANCE // ***************************************************************************************************** this.internalFunctions = [ '_validateModifier', '_repairAllIncomplete', '_userOrNull', '_processTransaction', '_checkTransactionFields', '_unpackageForUpdate', '_changeItemState', '_repairIncomplete' ]; var self = this; _.each(this.apiFunctions.concat(this.internalFunctions), function (fnName) { self[fnName] = function () { var instance = self._instance(); return instance[fnName].apply(instance, arguments); } }); } // ****************************** // Instantiation of the tx object // ****************************** // This (tx) is the object that gets exported for the app to interact with if (typeof tx === 'undefined') { tx = new TransactionManager(); tx.Transactions = Transactions; // Expose the Transactions collection via tx } else { throw new Meteor.Error('`tx` is already defined in the global scope. The babrahams:transactions package won\'t work.'); } // On the server, instances of `Transact` need to be deleted when the connection closes // Keep an eye on server memory -- this may not be enough to prevent a memory leak // , in which case we can take stronger action if (Meteor.isServer) { Meteor.onConnection(function (connection) { if (connection.id) { connection.onClose(function () { delete tx._txMap[connection.id]; }); } }); } Transact = function () { // *************************** // DONT OVERWRITE ANY OF THESE // *************************** // These are transient per-transaction properties that are used internally by the `Transact` instance that is created for the given transaction this._connectionId = null; // Should be immediately overwritten after instantiation this._transaction_id = null; this._autoTransaction = false; this._items = []; this._startAttempts = 0; this._rollback = false; this._rollbackReason = ''; this._autoCancel = null; this._lastTransactionData = null; this._context = {}; this._description = ''; } // ********** // PUBLIC API // ********** // Either use these methods together on the client, or use them together on the server, but don't try mixing the two! /** * Starts a transaction */ Transact.prototype.start = function (description, options) { if (tx.requireUser && !Meteor.userId()) { tx.log('User must be logged in to start a transaction.'); this._cleanReset(); return; } this._resetAutoCancel(Meteor.isClient); if (!this._transaction_id) { // Set transaction description if (typeof description === 'undefined') { description = 'last action'; } this._description = description; // Set any transaction options e.g. 
context if (typeof options === 'object') { if ('context' in options) { this._setContext(options['context']); } } this._transaction_id = Random.id(); // Transactions.insert({user_id:Meteor.userId(),timestamp:(ServerTime.date()).getTime(),description:description}); tx.log('Started "' + description + '" with transaction_id: ' + this._transaction_id + ((this._autoTransaction) ? ' (auto started)' : '')); return this._transaction_id; } else { tx.log('An attempt to start a transaction ("' + description + '") was made when a transaction was already open. Open transaction_id: ' + this._transaction_id); if ((tx.forceCommitBeforeStart && !(options && (options.useExistingTransaction || options.forceCommitBeforeStart === false))) || (options && options.forceCommitBeforeStart)) { // null parameter to force the commit // last parameter starts a new transaction after the commit tx.commit(null, undefined, undefined, {description: description, options: options}); } else { this._startAttempts++; if (!(options && options.useExistingTransaction)) { return false; } else { tx.log('Using existing transaction'); return this._transaction_id; } } } } /** * Checks whether a transaction is already started */ Transact.prototype.transactionStarted = function () { return this._transaction_id || null; } /** * Commits all the changes (actions) queued in the current transaction */ Transact.prototype.commit = function (txid, callback, newId, startNewTransaction) { var self = this; if (tx.requireUser && !Meteor.userId()) { tx.log('User must be logged in to commit a transaction.'); this._callback(txid, callback, new Meteor.Error('user-required','No user logged in.'), false); return; } this._lastTransactionData = {}; this._lastTransactionData.transaction_id = this._transaction_id; if (!this._transaction_id) { this._cleanReset(); tx.log("Commit reset transaction to clean state"); this._callback(txid, callback, new Meteor.Error('no-transactions-open', 'No transaction open.'), false); return; } // If a transaction is committed and either null is passed or the exact transaction_id value // then we force commit, no matter how many start attempts there have been // otherwise, if there are startAttempts recorded (i.e. tried to start a transaction explicitly while another one was open) // reduce the number of startAttempts by one, fire the callback with false and return if ((_.isString(txid) && txid === this._transaction_id) || txid === null) { // Force commit now tx.log("Forced commit"); } else if (this._startAttempts > 0) { this._startAttempts--; this._callback(txid, callback, new Meteor.Error('multiple-transactions-open', 'More than one transaction open. Closing one now to leave ' + this._startAttempts + ' transactions open.'), false); return; } /*if (!_.isFunction(txid) && typeof txid !== 'undefined' && txid !== this._transaction_id && _.isString(txid)) { if (txid === null) { tx.log("Forced commit"); } else { this._startAttempts--; this._callback(txid, callback, new Meteor.Error('multiple-transactions-open', 'More than one transaction open. Closing one now to leave ' + this._startAttempts + ' transactions open.'), false); return; } } if (this._startAttempts > 0 && !(!_.isFunction(txid) && typeof txid !== 'undefined' && (txid === this._transaction_id || txid === null))) { this._startAttempts--; this._callback(txid, callback, new Meteor.Error('multiple-transactions-open', 'More than one transaction open. 
Closing one now to leave ' + this._startAttempts + ' transactions open.'), false); return; }*/ if (_.isEmpty(this._items)) { // Don't record the transaction if nothing happened // Transactions.remove({_id:this._transaction_id}); tx.log('Empty transaction: ' + this._transaction_id); } if (this._rollback) { // One or more permissions failed or the transaction was cancelled, don't process the execution stack var error = this._rollbackReason; var errorDescription = ''; switch (this._rollbackReason) { case 'permission-denied' : errorDescription = 'One or more permissions were denied, so transaction was rolled back.'; break; case 'transaction-cancelled' : errorDescription = 'The transaction was cancelled programatically, so it was rolled back.'; break; default : errorDescription = 'An error occurred when processing an action.'; suppressError = false; break; } this.rollback(); this._callback(txid, callback, new Meteor.Error(error, errorDescription), false); return; } else { tx.log('Beginning commit with transaction_id: ' + this._transaction_id); var doRollback = function (err) { tx.log("Rolling back changes"); self.rollback(); self._callback(txid, callback, new Meteor.Error('error', 'An error occurred, so transaction was rolled back.', err), false); } try { var runCallback = function (res) { if (!self._lastTransactionData) { self._lastTransactionData = {}; } self._lastTransactionData.transaction_id = self._transaction_id; self._lastTransactionData.writes = res.items; var newIds = _.reduce(res.items, function (memo, item) { if (item.action === 'insert') { if (typeof memo[item.collection] === "undefined") { memo[item.collection] = []; } memo[item.collection].push(item._id); } return memo; }, {}); self._cleanReset(); tx.log("Commit reset transaction manager to clean state"); self._callback(txid, callback, null, newIds || true); if (_.isObject(startNewTransaction)) { self.start(startNewTransaction.description, startNewTransaction.options); } } // Okay, this is it -- we're really going to process the queue // So no need to time out the transaction if the processing takes a while // This will be async in the client and syncronous on the server if (Meteor.isServer) { Meteor.clearTimeout(this._autoCancel); var cannotOverridePermissionCheck = false; try { var result = self._processTransaction(this._transaction_id, this._description, this._items, this._context, cannotOverridePermissionCheck); } catch (err) { // tx.log(err); } if (!result) { self._callback(txid, callback, new Meteor.Error('error', 'An error occurred, so transaction was rolled back.'), false); return; } runCallback(result); } else { clearTimeout(this._autoCancel); Meteor.call("_meteorTransactionsProcess", this._transaction_id, this._description, this._items, this._context, function (err, res) { if (err || !res) { if (err) { // tx.log(err); } self._callback(txid, callback, new Meteor.Error('error','An error occurred, so transaction was rolled back.', err), false); return; } else { // Just in case Meteor.is[SomethingElse] that isn't server but isn't client either if (Meteor.isClient) { Tracker.flush(); // Not sure about this } runCallback(res); } }); } /*Transactions.update({_id:this._transaction_id}, {$set:_.extend({context:this._context}, {items:this._items})});*/ } catch (err) { tx.log(err); doRollback(err); return; } } return true; // A flag that at least the call was made // Only the callback from the _meteorTransactionsProcess will really be able to tell us the result of this call } /** * Allows programmatic call of a rollback */ 
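// Note: `rollbackAllDoneItems` is honoured only on the server; the client branch below
// forces it to false, so a client-side rollback only reverses writes that were made
// with {instant: true} in the currently open transaction.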
Transact.prototype.rollback = function (rollbackAllDoneItems) { // Need to undo all the instant stuff that's been done // TODO -- this is pretty half-baked -- we should be checking that actions are actually completed before continuing -- not just watching for errors // Eventually, this should be rolled into a single universal function // that iterates over the items array and makes db writes // TODO -- this should probably be run as a method var self = this; var items = this._items.reverse(); var error = false; if (Meteor.isClient) { // Don't let people mess with this from the client // Only the server can call tx.rollback(true); rollbackAllDoneItems = false; } _.each(items, function (obj, index) { if (obj.action === "remove") { if ((obj.instant || rollbackAllDoneItems) && obj.state === 'done') { try { if (obj.doc) { // This was removed from the collection, we need to reinsert it tx.collectionIndex[obj.collection].insert(obj.doc); } else { // This was soft deleted, we need to remove the deleted field tx.collectionIndex[obj.collection].update({_id: obj._id}, {$unset: {deleted: 1, transaction_id: self._transaction_id}}); } tx.log('Rolled back remove'); } catch (err) { tx.log(err); error = true; } } } if (obj.action === "update") { if (((obj.instant || rollbackAllDoneItems) && obj.state === 'done') && typeof obj.inverse !== 'undefined' && obj.inverse.command && obj.inverse.data) { var operation = {}; operation[obj.inverse.command] = self._unpackageForUpdate(obj.inverse.data); // console.log(operation); try { tx.collectionIndex[obj.collection].update({_id: obj._id}, operation); tx.log('Rolled back update'); } catch (err) { tx.log(err); error = true; } } } if (obj.action === "insert") { if ((obj.instant || rollbackAllDoneItems) && obj.state === 'done') { var sel = {_id: obj._id}; // This transaction_id check is in case the document has been subsequently edited -- in that case, we don't want it removed from the database completely sel.transaction_id = self._transaction_id; try { tx.collectionIndex[obj.collection].remove(sel); tx.log('Rolled back insert'); } catch (err) { tx.log(err); error = true; } } } if (!error) { self._changeItemState({ txid: self._transaction_id, index: (items.length - 1) - index, // Because order was reversed for rollback state: "rolledBack" }); } }); if (error) { tx.log("Rollback failed -- you'll need to check your database manually for corrupted records."); tx.log("Here is a log of the actions that were tried and their inverses:"); tx.log("(it was probably one of the inverse actions that caused the problem here)"); tx.log(EJSON.stringify(items, null, 2)); } // Server only // Client can't change the transactions collection directly anyway if (Meteor.isServer) { if (tx.removeRolledBackTransactions) { if (rollbackAllDoneItems) { Transactions.remove({_id: this._transaction_id}); } tx.log('Incomplete transaction removed: ' + this._transaction_id); } else { if (!Transactions.findOne({_id: this._transaction_id})) { if (this._transaction_id) { var transactionRecord = {_id: this._transaction_id, user_id: tx._userOrNull(), description: this._description, items: items, context: this._context, lastModified: ServerTime.date(), state: "rolledBack"}; Transactions.insert(transactionRecord, function (err, res) { if (err) { tx.log('No database record for transaction:', self._transaction_id); } }); } } else { Transactions.update({_id: this._transaction_id}, {$set: {state: "rolledBack"}}); } } } self._cleanReset(); tx.log("Rollback reset transaction manager to clean state"); } /** * Queue 
an insert */ Transact.prototype.insert = function (collection, newDoc, opt, callback) { if (this._rollback || (tx.requireUser && !Meteor.userId())) { return; } // We need to pass the options object when we do the actual insert // But also need to identify any callback functions var callback = (_.isFunction(callback)) ? callback : ((typeof opt !== 'undefined') ? ((_.isFunction(opt)) ? opt : ((_.isFunction(opt.callback)) ? opt.callback : undefined)) : undefined); if (opt && _.isObject(opt.tx)) { opt = opt.tx; } opt = (_.isObject(opt)) ? _.omit(opt,'tx') : undefined; // This is in case we're going to pass this options object on to, say, collection2 (tx must be gone or we'll create an infinite loop) newDoc = _.clone(newDoc); // NOTE: "collection" is the collection object itself, not a string if (this._permissionCheckOverridden(opt) || this._permissionCheck("insert", collection, newDoc, {})) { var self = this; this._openAutoTransaction(opt && opt.description || 'add ' + collection._name.slice(0, - 1)); self._setContext((opt && opt.context) || tx.makeContext('insert', collection, newDoc, {})); if ((typeof opt !== 'undefined' && opt.instant)) { // || this._autoTransaction try { var newId = newDoc._id || Random.id(); _.extend(newDoc, {_id: newId, transaction_id: self._transaction_id}); var newId = self._doInsert(collection, newDoc, opt, callback); // Should give the same newId value as the one we passed var item = self._createItem('insert', collection, newId, {doc: newDoc}, true, self._permissionCheckOverridden(opt)); this._recordTransaction(item); self._pushToRecord("insert", collection, newId, {doc: newDoc}, true, self._permissionCheckOverridden(opt)); // true is to mark this as an instant change this._closeAutoTransaction(opt, callback, newId); tx.log("Executed instant insert"); return newId; } catch(err) { tx.log(err); tx.log("Rollback initiated by instant insert command"); this._rollback = true; this._rollbackReason = 'insert-error'; } } var newId = newDoc._id || Random.id(); // var newId = self._doInsert(collection,_.extend(newDoc,{transaction_id:self._transaction_id}),opt,callback); self._pushToRecord("insert", collection, newId, {doc: _.extend(newDoc, {_id: newId, transaction_id: self._transaction_id})}, false, self._permissionCheckOverridden(opt)); tx.log("Pushed insert command to stack: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')' this._closeAutoTransaction(opt, callback); return newId; } else { this._rollback = true; this._rollbackReason = 'permission-denied'; tx.log("Insufficient permissions to insert this document into " + collection._name + ':', newDoc); // Permission to insert not granted return; } } /** * Queue a remove */ Transact.prototype.remove = function (collection, doc, opt, callback) { // Remove any document with a field that has this val // NOTE: "collection" is the collection object itself, not a string if (this._rollback || (tx.requireUser && !Meteor.userId())) { return; } // We need to pass the options object when we do the actual remove // But also need to identify any callback functions var callback = (_.isFunction(callback)) ? callback : ((typeof opt !== 'undefined') ? ((_.isFunction(opt)) ? opt : ((_.isFunction(opt.callback)) ? opt.callback : undefined)) : undefined); if (opt && _.isObject(opt.tx)) { opt = opt.tx; } var _id = (_.isObject(doc)) ? doc._id : doc; var existingDoc = collection.findOne({_id: _id}); // (!_.isObject(doc)) ? 
collection.findOne({_id: doc}) : doc; // , {transform: null} if (!(_id && existingDoc)) { tx.log('No document found. Make sure you provide an _id field for a document that exists. You passed: ' + JSON.stringify(doc)); } if (this._permissionCheckOverridden(opt) || this._permissionCheck("remove", collection, existingDoc, {})) { var self = this; this._openAutoTransaction(opt && opt.description || 'remove ' + collection._name.slice(0, - 1)); var sel = {_id: _id}; if (Meteor.isServer) { sel.deleted = {$exists: false}; // Can only do removes on client using a simple _id selector } self._setContext((opt && opt.context) || tx.makeContext('remove', collection, existingDoc, {})); if (opt && opt.instant) { try { self._doRemove(collection, _id, sel, true, opt, callback); tx.log("Executed instant remove"); } catch(err) { tx.log(err); tx.log("Rollback initiated by instant remove command"); this._rollback = true; this._rollbackReason = 'remove-error'; } } else { self._doRemove(collection, _id, sel, false, opt, callback); tx.log("Pushed remove command to stack: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')' } this._closeAutoTransaction(opt, callback); return !this._rollback; // Remove was executed or queued for execution } else { this._rollback = true; this._rollbackReason = 'permission-denied'; tx.log("Insufficient permissions to remove this document from " + collection._name + ':', existingDoc); // Permission to remove not granted return; } } /** * Queue an update */ Transact.prototype.update = function (collection, doc, updates, opt, callback) { // NOTE: "field" should be of the form {$set:{field:value}}, etc. // NOTE: "collection" is the collection object itself, not a string if (this._rollback || (tx.requireUser && !Meteor.userId())) { return; } // We need to pass the options object when we do the actual update // But also need to identify any callback functions var callback = (_.isFunction(callback)) ? callback : ((typeof opt !== 'undefined') ? ((_.isFunction(opt)) ? opt : ((_.isFunction(opt.callback)) ? opt.callback : undefined)) : undefined); if (opt && _.isObject(opt.tx)) { opt = opt.tx; } var opt = (_.isObject(opt)) ? _.omit(opt,'tx') : undefined; var self = this; var _id = (_.isObject(doc)) ? doc._id : doc; var existingDoc = collection.findOne({_id: _id}); // , {transform: null} // var existingDoc = (!_.isObject(doc)) ? collection.findOne({_id:_id}) : doc; // the above is slightly more efficient, in that it doesn't hit the database again // but potential buggy behaviour if a partial doc is passed and the field being updated // isn't in it and it's a $set command and so the inverse is wrongly taken to be $unset if (!(_id && existingDoc)) { tx.log('No document found. Make sure you provide an _id field for a document that exists. 
You passed: ' + JSON.stringify(doc)); } if (this._permissionCheckOverridden(opt) || this._permissionCheck("update", collection, existingDoc, updates)) { this._openAutoTransaction(opt && opt.description || 'update ' + collection._name.slice(0, - 1)); var actionFields = _.pairs(updates); // console.log('actionFields:', actionFields); // If the command is $set, the update map needs to be split into individual fields in case some didn't exist before and some did // in which case different fields will require different inverse operations actionFields = _.reduce(actionFields, function (memo, action) { var command = action[0]; // console.log("the command:", command); var updateMap = action[1]; // console.log("the map:", updateMap); if (command === '$set') { actions = _.map(updateMap, function (val, key) { var obj = {}; obj[key] = val; return obj; }); // console.log("the actions:", actions); } else { actions = [updateMap]; } _.each(actions, function (update) { memo.push([command, update]); }); return memo; }, []); // console.log("actionFields:", actionFields); var actionFieldsCount = actionFields.length; for (var i = 0; i < actionFieldsCount; i++) { var command = actionFields[i][0]; // console.log("command:", command); var updateMap = actionFields[i][1]; // console.log("updateMap:", updateMap, EJSON.stringify(updateMap)); var inverse; if (typeof opt === 'undefined' || typeof opt.inverse === 'undefined') { // var fieldName = _.keys(actionField[0][1])[0]; // console.log(fieldName); if (typeof opt === 'undefined') { opt = {}; } // console.log("inverse called with (collection, existingDoc, updateMap, opt):", collection, existingDoc, updateMap, opt); inverse = _.isFunction(tx.inverseOperations[command]) && tx.inverseOperations[command].call(self, collection, existingDoc, updateMap, opt) || tx._inverseUsingSet(collection, existingDoc, updateMap, opt); // console.log("inverse:", inverse); } else { // This "opt.inverse" thing is only used if you need to define some tricky inverse operation, but will probably not be necessary in practice // a custom value of opt.inverse needs to be an object of the form: // {command: "$set", data: {fieldName: value}} inverse = opt.inverse; } // console.log("inverse:", inverse); self._setContext((opt && opt.context) || tx.makeContext('update', collection, existingDoc, updates)); var updateData = {command: command, data: updateMap}; if (opt && opt.instant) { try { self._doUpdate(collection, _id, updates, updateData, inverse, true, opt, callback, (i === (actionFieldsCount - 1)) ? true : false); tx.log("Executed instant update"); // true param is to record this as an instant change } catch(err) { tx.log(err); tx.log("Rollback initiated by instant update command"); this._rollback = true; this._rollbackReason = 'update-error'; } } else { (function (updateData, inverse, execute) { // console.log('updateData, inverse, execute:', updateData, inverse, execute); self._doUpdate(collection, _id, updates, updateData, inverse, false, opt, callback, execute); tx.log("Pushed update command to stack: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')' }).call(this, updateData, inverse, (i === (actionFieldsCount - 1)) ? 
true : false); } } this._closeAutoTransaction(opt, callback); return !this._rollback; // Update was executed or queued for execution } else { this._rollback = true; this._rollbackReason = 'permission-denied'; tx.log("Insufficient permissions to update this document in " + collection._name + ':', existingDoc); // Permission to update not granted return; } } /** * Cancels a transaction, but doesn't roll back immediately * When the transaction is committed, no queued actions will be executed * and any instant updates, inserts or removes that were made will be rolled back */ Transact.prototype.cancel = function () { tx.log('Transaction cancelled'); this._rollback = true; this._rollbackReason = 'transaction-cancelled'; } /** * Undo the last transaction by the user */ Transact.prototype.undo = function (txid, callback) { var self = this; var callback = (_.isFunction(txid)) ? txid : callback; Meteor.call("_meteorTransactionsUndo", (_.isString(txid)) ? txid : null, function (err, res) { if (Meteor.isClient && res && _.isFunction(tx.onTransactionExpired)) { tx.onTransactionExpired.call(self, err, res); } if (_.isFunction(callback)) { callback.call(self, err, !res); } }); } /** * Redo the last transaction undone by the user */ Transact.prototype.redo = function (txid, callback) { var self = this; var callback = (_.isFunction(txid)) ? txid : callback; Meteor.call("_meteorTransactionsRedo", (_.isString(txid)) ? txid : null, function (err, res) { if (Meteor.isClient && res && _.isFunction(tx.onTransactionExpired)) { tx.onTransactionExpired.call(); } if (_.isFunction(callback)) { callback.call(self, err, !res); } }); } /** * Manually add context to current transaction using _.extend (equivalent of lodash.assign) */ Transact.prototype.setContext = function (context) { this._setContext(context); } /** * Manually add context to current transaction using lodash.merge */ Transact.prototype.mergeContext = function (context) { tx.lodash.merge(this._context, context); } /** * Manually add context to current transaction using lodash.set */ Transact.prototype.setContextPathValue = function (path, value) { tx.lodash.set(this._context, path, value); } /** * Fetch context of the current transaction */ Transact.prototype.getContext = function (context) { return this._context; } // ********************************************************** // INTERNAL METHODS - NOT INTENDED TO BE CALLED FROM APP CODE // ********************************************************** Transact.prototype._doInsert = function (collection, newDoc, opt, callback) { // The following is a very sketchy attempt to support collection2 options // Requires aldeed:collection2 to be before babrahams:transactions in .packages // which we do through a weak dependency on aldeed:collection2 if (this._Collection2Support(collection, opt)) { // This is a brutal workaround to allow collection2 options to do their work var newId = null; var error = null; var returnedId = collection.insert(newDoc, opt, function (err, newId) { if (!err) { newId = newId; } else { error = err; } if (_.isFunction(callback)) { // Let the app handle the error via its own callback callback(error, newId); } }); if (returnedId) { return returnedId; } else { throw new Meteor.Error('Insert failed: reason unknown.', ''); } } else { return collection.insert(newDoc, callback); } } Transact.prototype._doRemove = function (collection, _id, sel, instant, opt, callback) { if (!_.isFunction(callback)) { callback = undefined; } var self = this; if ((opt && typeof opt.softDelete !== 'undefined' && 
opt.softDelete) || (opt && typeof opt.softDelete === 'undefined' && tx.softDelete) || (typeof opt === 'undefined' && tx.softDelete)) { self._pushToRecord("remove", collection, _id, null, instant, self._permissionCheckOverridden(opt)); if (instant) { var item = self._createItem("remove", collection, _id, null, instant, self._permissionCheckOverridden(opt)); self._recordTransaction(item); collection.update(sel, {$set: {deleted: ServerTime.date(), transaction_id: self._transaction_id}}, callback); } return; } var fields = {hardDelete: true}; // Hard delete document if (instant) { var fullDoc = collection.findOne(sel); // , {transform: null} fields.doc = fullDoc; if (!fullDoc) { // There is no existing document to remove // Return without creating an item in the queue or recording it tx.log("Document not found for removal:", sel); } else { fields.doc.transaction_id = self._transaction_id; fields.doc.deleted = ServerTime.date(); } var item = self._createItem("remove", collection, _id, fields, instant, self._permissionCheckOverridden(opt)); self._recordTransaction(item); collection.remove(sel, callback); } self._pushToRecord("remove", collection ,_id, fields, instant, self._permissionCheckOverridden(opt)); // null is for field data (only used for updates) and true is to mark this as an instant change } Transact.prototype._doUpdate = function (collection, _id, updates, updateData, inverseData, instant, opt, callback, execute) { // console.log("collection, _id, updates, updateData, inverseData, instant, opt, callback, execute", collection, _id, updates, updateData, inverseData, instant, opt, callback, execute); var self = this; if (instant) { if (execute) { if (!_.isFunction(callback)) { callback = undefined; } if (_.isObject(updates["$set"])) { _.extend(updates["$set"], {transaction_id: self._transaction_id}); } else { updates["$set"] = {transaction_id: self._transaction_id}; } // This error, handler business is to allow collection2 `filter:false` to do its work var error = null; var handler = function (err, res) { if (err) { error = err; } if (_.isFunction(callback)) { callback(err, res); } } var item = self._createItem("update", collection, _id, {update: self._packageForStorage(updateData), inverse: self._packageForStorage(inverseData)}, instant,self._permissionCheckOverridden(opt)); self._recordTransaction(item); // No need to check for aldeed:collection2 support (like we do for inserts) // as we can pass an options hash to an update if (_.isObject(opt)) { collection.update({_id:_id}, updates, opt, handler); } else { collection.update({_id:_id}, updates, handler); } if (error) { throw new Meteor.Error('Update failed: ' + error.message, error.reason); return; } delete updates["$set"].transaction_id; } } // console.log(JSON.stringify({update: self._packageForStorage(updateData)}), JSON.stringify({inverse: self._packageForStorage(inverseData)})); self._pushToRecord("update", collection, _id, {update: self._packageForStorage(updateData), inverse: self._packageForStorage(inverseData)}, instant, self._permissionCheckOverridden(opt)); } // This is used only if an {instant: true} parameter is passed Transact.prototype._recordTransaction = function (item) { if (!Transactions.findOne({_id: this._transaction_id})) { // We need to get a real transaction in the database for recoverability purposes var user_id = tx._userOrNull(); Transactions.insert({ _id: this._transaction_id, user_id: user_id, lastModified: ServerTime.date(), description: this._description, context: this._context, state: "pending" }); } 
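// Append this instant write to the transaction's items array, so the item can be
// inspected, repaired or rolled back later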
Transactions.update({_id: this._transaction_id}, {$addToSet: {items: item}}); } // This is used to check that the document going into the transactions collection has all the necessary fields Transact.prototype._validateModifier = function (modifier, txid) { var fields = modifier && modifier["$addToSet"]; if (!fields) { return null; } return this._checkTransactionFields([fields.items], txid); } Transact.prototype._checkTransactionFields = function (items, txid, cannotOverridePermissionCheck) { // Iterate over all the items that are going to be stored on the transaction stack and check their legitimacy if (!items || !items.length) { return false; } var self = this, recombinedUpdateFields = {}, recombinedInverseFields = {}; var action, collection, doc, details, inverseDetails, fail = false, previouslyNonExistingDoc = {}; _.each(items, function (value) { if (!fail) { collection = value.collection; // Watch out for undo method validation of a remove, where the doc has been hard removed from the collection // Allowing a doc through that was passed from the client is a potential security hole here, but without soft delete, there is no actual record of that doc // So we check that the removed doc's transaction_id value matches the txid if (value.action === 'remove' && value.doc && value.doc.transaction_id === txid) { doc = value.doc; } else if (value.action === 'insert' && value.doc && value.doc.transaction_id === txid) { // Handle redo of an insert (after a previous undo) doc = value.doc; previouslyNonExistingDoc[doc._id] = doc; } else { doc = previouslyNonExistingDoc[value._id] || tx.collectionIndex[collection].findOne({_id: value._id}); } if (Meteor.isClient && !doc) { // Because this runs in a client simulation, // where a removed document may not be in the collection // or we simply may not be subscribed to that doc anymore // we need to consider and watch out for this // we'll extend the benefit of the doubt and let the server handle the real check return; } if (value.action === 'update') { action = 'update'; details = value.update; inverseDetails = value.inverse; recombinedUpdateFields[details.command] = self._unpackageForUpdate(details.data); recombinedInverseFields[inverseDetails.command] = self._unpackageForUpdate(inverseDetails.data); if (!(value.noCheck && !cannotOverridePermissionCheck)) { // Transactions that have been allowed using overridePermissionCheck are considered here, using the noCheck flag // Otherwise the user won't be able to undo them try { fail = !(self._permissionCheck(action, tx.collectionIndex[collection], doc, recombinedUpdateFields) && self._permissionCheck(action, tx.collectionIndex[collection], doc, recombinedInverseFields)); } catch (err) { fail = true; } } return; } else if (value.action === 'insert') { action = 'insert'; } else if (value.action === 'remove' ) { action = 'remove'; } if (!(value.noCheck && !cannotOverridePermissionCheck)) { try { fail = !self._permissionCheck(action, tx.collectionIndex[collection], doc, {}); } catch (err) { // If this transaction was made possible by overridePermissionCheck // It may not be undo/redo-able, unless we follow the same rules (i.e. 
abide by the noCheck flag) fail = true; } } } }); return !fail; } // Reset everything to a clean state Transact.prototype._cleanReset = function () { this._transaction_id = null; this._autoTransaction = false; this._items = []; this._startAttempts = 0; this._granted = {}; this._rollback = false; this._rollbackReason = ''; this._context = {}; this._description = ''; // Note: we don't reset this._lastTransactionData because we want it to be available AFTER the commit Meteor.clearTimeout(this._autoCancel); } Transact.prototype._callback = function (a, b, err, res) { var c = (_.isFunction(a)) ? a : ((_.isFunction(b)) ? b : null); if (c) { c.call(this._lastTransactionData, err, res); } } // Starts a transaction automatically if one isn't started already Transact.prototype._openAutoTransaction = function (description) {// console.log("Auto open check value for transaction_id: " + this._transaction_id + ' (Auto: ' + this._autoTransaction + ')'); if (!this._transaction_id) { this._autoTransaction = true; this._description = description; this.start(description); // console.log("Auto opened: " + this._transaction_id + ' (Auto: ' + this._autoTransaction + ')'); } } // Commits a transaction automatically if it was started automatically Transact.prototype._closeAutoTransaction = function (opt, callback, newId) {// console.log("Auto commit check value for autoTransaction: " + this._autoTransaction + ' (Auto: ' + this._autoTransaction + ')'); if (this._autoTransaction) { tx.log("Auto committed: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')'; this.commit(opt, undefined, newId); } } // Cancels and commits a transaction automatically if it exceeds the idleTimeout threshold with no new actions Transact.prototype._resetAutoCancel = function (isStartOnClient) { if (Meteor.isServer || isStartOnClient) { var self = this; if (Meteor.isServer) { Meteor.clearTimeout(this._autoCancel); } else { clearTimeout(this._autoCancel); } // It would be nice to use Meteor.clearTimeout and Meteor.setTimeout // but we need to sidestep the rule that timers can't be // started in simulations (which is what Meteor.setTimeout checks for) var onTimeOut = function () { tx.log('Transaction (' + self._transaction_id + ') was cancelled after being inactive for ' + (tx.idleTimeout / 1000) + ' seconds.'); self.rollback(); }; this._autoCancel = (Meteor.isServer) ? 
Meteor.setTimeout(onTimeOut, tx.idleTimeout) : setTimeout(onTimeOut, tx.idleTimeout); } } // Pushes the record of a single action to the "items" sub document that is going to be recorded in the transactions collection along with data about this transaction Transact.prototype._pushToRecord = function (type, collection, _id, fieldData, instant, permissionCheckOverridden) { var item = this._createItem(type, collection, _id, fieldData, instant, permissionCheckOverridden); this._items.push(item); this._resetAutoCancel(); } // Create item for queue Transact.prototype._createItem = function (type, collection, _id, fieldData, instant, permissionCheckOverridden) { var item = {collection:collection._name, _id:_id, action:type, state: "pending"}; if (typeof instant !== 'undefined' && instant) { item.instant = true; item.state = "done"; } if (typeof permissionCheckOverridden !== 'undefined' && permissionCheckOverridden) { item.noCheck = true; } if (typeof fieldData !== "undefined" && fieldData) { _.extend(item, fieldData); } return item; } // Checks whether the permission check should be waived Transact.prototype._permissionCheckOverridden = function (opt) { return typeof opt !== 'undefined' && opt.overridePermissionCheck; } // Uses a user-defined permission check as to whether this action is allowed or not Transact.prototype._permissionCheck = function (action, collection, doc, updates) { // insert and remove send null for "updates" param, but this is where all the details of any update are found return tx.checkPermission(action, collection, doc, updates); } // Builds the context object Transact.prototype._setContext = function (context) { _.extend(this._context, context); } // This turns the data that has been stored in an array of key-value pairs into an object that mongo can use in an update Transact.prototype._unpackageForUpdate = function (data) { var objForUpdate = {}; _.each(data, function (val) { var unpackagedValue; if (val.json) { unpackagedValue = EJSON.parse(val.json); } else { unpackagedValue = val.value; } objForUpdate[val.key] = unpackagedValue; }); return objForUpdate; } // This turns the data that is given as a mongo update into an array of key-value pairs that can be stored Transact.prototype._packageForStorage = function (update) { var arrForStorage = []; _.each(update.data, function (value, key) { var packagedValue = {}; if ((_.isObject(value) || _.isArray(value)) && !_.isDate(value)) { // a date is considered an object by _.isObject (because it is, I suppose) packagedValue.json = EJSON.stringify(value); } else { packagedValue.value = value; } arrForStorage.push(_.extend({key: key}, packagedValue)); }); return {command: update.command, data: arrForStorage}; } Transact.prototype._Collection2Support = function (collection, opt) { // The following is a very sketchy attempt to support collection2 options // Requires aldeed:collection2 to be before babrahams:transactions in .packages // which we do through a weak dependency on aldeed:collection2 return _.isFunction(collection.simpleSchema) && collection.simpleSchema() !== null && _.find([ "validationContext", "validate", "filter", "autoConvert", "removeEmptyStrings", "getAutoValues", "replace", "upsert", "extendAutoValueContext", "trimStrings", "extendedCustomContext", "transform" ], function (c2option) { return typeof opt[c2option] !== "undefined"; } ); } Transact.prototype._changeItemState = function (data) { // Need to make a write to the transaction record, marking this action as `done` var m = {}; m["items." 
+ data.index + ".state"] = data.state; Transactions.update({_id: data.txid}, {$set: m}); } Transact.prototype._userOrNull = function () { var userId = null; // Need the try-catch because we don't know if this is getting called from a method or from plain server code // If from plain server code, it will throw an error try { userId = _.isFunction(Meteor.userId) && Meteor.userId(); } catch (err) { return null; } return userId; } /** * Actually execute the transaction - i.e. make the db writes */ Transact.prototype._processTransaction = function (txid, description, items, context, cannotOverridePermissionCheck) { check(txid,String); check(description,String); check(items, Array); check(context, Object); check(cannotOverridePermissionCheck, Boolean); if (items && items.length && !tx._checkTransactionFields(items, txid, cannotOverridePermissionCheck)) { throw new Meteor.Error('Transaction not allowed'); // TODO -- we need a bit of a better error message than this! return; } // Here is where we need to execute the 2-phase commit // We begin by setting the transaction document with all write info to a state of pending var existingTransaction = Transactions.findOne({_id: txid}); if (existingTransaction) { // throw new Meteor.Error('Transaction with duplicate _id found'); // return; // This is here because we have some {instant: true} calls // Overwrite the items field with the full complement of items in the queue // Also overwrite context with context from all item calls, // and any manual tx.setContext calls Transactions.update({_id: txid}, {$set: {items: items, context: context, description: description, lastModified: ServerTime.date()}}); } // First, need to iterate over the changes that are going to be made and make sure that, // if there are hard removes, the db version of the doc gets stored on the transaction _.each(items, function (item, index) { if (item.action === "remove" && item.hardDelete) { // Get the existing doc and store it in the transaction record // We overwrite the temporary version of the doc from an instant remove on the client // Because chances are that the whole document was not available on the client var Collection = tx.collectionIndex[item.collection]; var doc = Collection.findOne({_id: item._id}); // , {transform: null} items[index].doc = doc; } }); // STEP 1 - Set initial state of transaction to "pending" if (!existingTransaction && !Transactions.insert({_id: txid, user_id: tx._userOrNull(), description: description, items: items, context: context, lastModified: ServerTime.date(), state: "pending"})) { throw new Meteor.Error('Unable to commit transaction'); return; } // STEP 2 - Make changes specified by items in the queue var success = true; var self = this; var logErrors = function (err) { tx.log(err.toString()); }; var updateCache = {}; var cacheValues = function (item, index) { _.each(item.update.data, function (keyValuePair, i) { if (_.isUndefined(updateCache[item.collection])) { updateCache[item.collection] = {}; } if (_.isUndefined(updateCache[item.collection][item._id])) { updateCache[item.collection][item._id] = {}; } // If there's an item in the update cache, we need to overwrite the transaction record now // Because we know it probably has the wrong inverse value if (!_.isUndefined(updateCache[item.collection][item._id][keyValuePair.key])) { var mod = tx._unpackageForUpdate([{ key: "items." + index + ".inverse.data." 
+ i + ".value", value: updateCache[item.collection][item._id][keyValuePair.key] }]); var update = {$set: mod}; if (cannotOverridePermissionCheck) { update["$unset"] = {noCheck: 1}; } Transactions.update({_id: txid}, update); } updateCache[item.collection][item._id][keyValuePair.key] = keyValuePair.value; }); } var newIdValues = {}; _.each(items, function (item, index) { if (success) { try { if (item.instant) { // Already done -- don't do it again if (item.action === 'update') { // Cache values cacheValues(item, index); } return; } var Collection = tx.collectionIndex[item.collection]; var txData = {transaction_id: txid}; switch (item.action) { case 'insert' : // Will run synchronously. If this fails an exception will be thrown var newId = Collection.insert(_.extend(item.doc, {_id: item._id}, txData)); // The insert succeeded items[index].state = 'done'; tx.log("Executed insert"); break; case 'update' : var modifier = {}; var data = tx._unpackageForUpdate(item.update.data); modifier[item.update.command] = data; if (modifier["$set"]) { // Add to the $set modifier modifier["$set"] = _.extend(modifier["$set"], txData); } else { // Add a $set modifier modifier["$set"] = txData; } // Will run synchronously. If this fails an exception will be thrown Collection.update({_id: item._id}, modifier); // The update succeeded // Cache values if (item.update.command === "$set") { cacheValues(item, index); } items[index].state = 'done'; tx.log("Executed update"); break; case 'remove' : if (item.hardDelete) { // Remove the whole document // Will run synchronously. If this fails an exception will be thrown var removed = Collection.remove({_id: item._id}); // The remove succeeded items[index].state = 'done'; tx.log('Executed remove'); } else { // Just do a soft delete // Will run synchronously. 
If this fails an exception will be thrown Collection.update({_id: item._id}, {$set: _.extend(txData, {deleted: ServerTime.date()})}); // The remove succeeded items[index].state = 'done'; tx.log('Executed remove'); } break; default : // Do nothing } } catch (err) { success = false; logErrors(err); } } if (success) { tx._changeItemState({ txid: txid, index: index, state: 'done' }); } }); // STEP 3 - Set state to "done" if (success) { var self = this; Transactions.update({_id: txid}, {$set: {state: "done", lastModified: ServerTime.date()}}, function (err, res) { if (err) { tx.log('Could not complete transaction:', txid, err); success = false; } }); if (success) { var finalTxRecord = Transactions.findOne({_id: txid}); return {items: finalTxRecord.items}; } } else { tx.log('Transaction failed'); // Need to run the items through a rollback with actual inverse writes var rollbackAllDoneItems = true; tx._transaction_id = txid; tx._items = items; tx._description = description; tx._context = context; tx.rollback.call(tx, rollbackAllDoneItems); } } // ******* // Methods // ******* // These are the methods that actually do the commits and undo and redo work // They would usually not be called directly -- but invoked using tx.undo() and tx.redo() // Although these methods are pretty large, we're including them on both client and server // because we want to maintain latency compensation on the client Meteor.methods({ '_meteorTransactionsProcess' : function (txid, description, items, context) { check(txid,String); check(description,String); check(items, Array); check(context, Object); var cannotOverridePermissionCheck = true; return tx._processTransaction(txid, description, items, context, cannotOverridePermissionCheck); }, '_meteorTransactionsUndo' : function (txid) { check(txid,Match.OneOf(String,null,undefined)); if (tx.requireUser && !Meteor.userId()) { console.log('You must be logged in to undo actions.'); return; } // Get the latest transaction done by this user and undo it var expired = false; var queuedItems = []; var selector = (txid) ? { _id: txid} : {user_id: tx._userOrNull()}; var sorter = (txid) ? undefined : {sort: {lastModified: -1}, limit:1}; var lastTransaction = Transactions.find(_.extend(selector, {$or: [{undone: null}, {undone: {$exists: false}}], expired: {$exists: false}, state: "done"}), sorter).fetch()[0]; if (lastTransaction && typeof lastTransaction.items !== 'undefined') { // Check that user still has permission to edit all these items // Undo in reverse order // e.g. 
Need to undo removes first, so that docs are available for undo updates if docs were updated before removal if (tx._checkTransactionFields(lastTransaction.items, lastTransaction._id)) { _.each(lastTransaction.items.reverse(), function (obj, index) { if (obj.action === 'remove') { if (!expired) { if (obj.doc) { // This doc is here because the original was removed // First check for duplicates -- if there is one, the transaction has expired if (tx.collectionIndex[obj.collection].find(obj.doc._id).count()) { expired = true; } else { queuedItems.push(function () { tx.collectionIndex[obj.collection].insert(obj.doc); }); } } else { // This was removed with softDelete queuedItems.push(function () { tx.collectionIndex[obj.collection].update({_id: obj._id}, {$unset: {deleted: 1, transaction_id: lastTransaction._id}}); }); } } } if (obj.action === 'update') { if (!expired) { if (typeof obj.inverse !== 'undefined' && obj.inverse.command && obj.inverse.data) { var operation = {}; operation[obj.inverse.command] = tx._unpackageForUpdate(obj.inverse.data); // console.log('inverse operation:'+EJSON.stringify(operation)); queuedItems.push(function () { tx.collectionIndex[obj.collection].update({_id: obj._id}, operation); /* console.log("operation called:"+EJSON.stringify(operation)); */ }); } } } if (obj.action === 'insert') { if (!expired) { var sel = {_id: obj._id}; // This transaction check is in case the document has been subsequently edited -- in that case, we don't want it removed from the database completely // Instead, we remove this transaction from the visible list by setting expired to true sel.transaction_id = lastTransaction._id; queuedItems.push(function () { tx.collectionIndex[obj.collection].remove(sel); } ); if (tx.collectionIndex[obj.collection].findOne({_id: obj._id, $and: [{transaction_id: {$exists: true}}, {transaction_id: {$ne: lastTransaction._id}}]})) { // Transaction has expired expired = true; // This is to tell the client that the transaction has expired and the undo was not executed } } } }); if (!expired) { // Process queue _.each(queuedItems,function (queuedItem, index) { var fail = false; try { queuedItem.call(); } catch (err) { fail = true; } if (!fail) { tx._changeItemState({ txid: lastTransaction._id, index: (queuedItems.length - 1) - index, // Because array has been reversed for undo state: 'undone' }); } }); // After an undo, we need to update transaction document Transactions.update({_id: lastTransaction._id}, {$set: {undone: ServerTime.date(), state: 'undone'}}); } } else { // Non-empty transaction, but user has lost the permission to edit at least one of the items encompassed by the transaction expired = true; } if (expired) { // Flag this as expired in the db to keep it out of the user's undo/redo stack Transactions.update({_id: lastTransaction._id}, {$set: {expired: true}}); } } else if (lastTransaction) { // Auto clean - this transaction is empty Transactions.remove({_id: lastTransaction._id}); } return expired; // If the function returns true, the undo failed }, '_meteorTransactionsRedo' : function (txid) { check(txid,Match.OneOf(String, null, undefined)); if (tx.requireUser && !Meteor.userId()) { console.log('You must be logged in to redo actions.'); return; } // Get the latest undone transaction by this user and redo it var expired = false; var queuedItems = []; var selector = (txid) ? {_id: txid} : {user_id: tx._userOrNull()}; var sorter = (txid) ? 
undefined : {sort: {undone: -1}, limit: 1}; var lastUndo = Transactions.find(_.extend(selector, {undone: {$exists: true, $ne: null}, expired: {$exists: false}}), sorter).fetch()[0]; if (lastUndo && typeof lastUndo.items !== 'undefined') { // Check that user still has permission to edit all these items if (tx._checkTransactionFields(lastUndo.items, lastUndo._id)) { _.each(lastUndo.items, function (obj, index) { if (obj.action === "remove") { if (obj.doc) { // This document was removed using a hard delete the first time // We'll hard delete again, making no attempt to save any modifications that have happened to the document in the interim queuedItems.push(function () { tx.collectionIndex[obj.collection].remove({_id: obj._id})}); } else { queuedItems.push(function () { tx.collectionIndex[obj.collection].update({_id: obj._id}, {$set: {deleted: ServerTime.date(), transaction_id: lastUndo._id}})}); } } if (obj.action === "update") { if (typeof obj.update !== 'undefined' && obj.update.command && obj.update.data) { var operation = {}; operation[obj.update.command] = tx._unpackageForUpdate(obj.update.data);// console.log(operation); queuedItems.push(function () { tx.collectionIndex[obj.collection].update({_id: obj._id}, operation); }); } } if (obj.action === "insert") { if (!expired) { if (!tx.collectionIndex[obj.collection].find({_id: obj._id}).count()) { var newDoc = _.extend(obj.doc, {transaction_id: lastUndo._id,_id: obj._id}); queuedItems.push(function () { tx.collectionIndex[obj.collection].insert(newDoc) }); } else { // This is an edited doc that was not removed on last undo // Transaction has expired expired = true; // This is to tell the client that the transaction has expired and the redo was not executed } } } }); if (!expired) { // Process queue _.each(queuedItems, function (queuedItem, index) { var fail = false; try { queuedItem.call(); } catch (err) { fail = true; } if (!fail) { tx._changeItemState({ txid: lastUndo._id, index: index, state: 'done' }); } }); // After a redo, we need to update the transaction document Transactions.update({_id: lastUndo._id}, {$unset: {undone: 1}, $set: {state: 'done'}}); // ,$set:{lastModified: ServerTime.date()} -- LEADS TO UNEXPECTED RESULTS } } else { // User no longer has permission to edit one of the items in this transaction expired = true; } if (expired) { // Flag this transaction as expired to keep it out of the user's undo-redo stack Transactions.update({_id: lastUndo._id}, {$set: {expired: true}}); } } return expired; // If the function returns true, the redo failed } }); // Wrap DB write operation methods // Wrapping technique shamelessly stolen from aldeed:collection2 codebase // (https://github.com/aldeed/meteor-collection2/blob/master/collection2.js) and modified for this package // backwards compatibility if (typeof Mongo === "undefined") { Mongo = {}; Mongo.Collection = Meteor.Collection; } _.each(['insert', 'update', 'remove'], function (methodName) { var _super = Mongo.Collection.prototype[methodName]; Mongo.Collection.prototype[methodName] = function () { var self = this, args = _.toArray(arguments); // self is the Mongo.Collection instance var optionsArg = (methodName === 'update') ? 2 : 1; if (_.isObject(args[optionsArg]) && args[optionsArg].tx) { args.unshift(self); return tx[methodName].apply(tx, args); } return _super.apply(self, args); }; }); // Here we ensure the the tx object is aware of the apps collections and can access them by name // we use dburles:mongo-collection-instances package to do this. 
// We also check for the presence of SimpleSchema and extend the schema of existing // collections to allow for the fields that transactions will add to documents Meteor.startup(function () { Meteor.defer(function () { // Auto detect collections tx.collectionIndex = (_.isEmpty(tx.collectionIndex)) ? _.reduce(Mongo.Collection.getAll(), function (memo, coll) { memo[coll.name] = coll.instance; return memo; }, {}) : tx.collectionIndex; // Built in support for simple-schema/collection2 if (typeof SimpleSchema !== 'undefined') { _.each(tx.collectionIndex, function (collection) { if (_.isFunction(collection.simpleSchema) && collection.simpleSchema() !== null && collection._c2) { collection.attachSchema({deleted: {type: Date, label: "Deleted", optional: true}, transaction_id: {type:String, label: "transaction_id", optional: true}, _id: {type: String, label: "_id", optional: true}}); } }); if (_.isFunction(tx.Transactions.attachSchema)) { var userPattern = { type:String, label:"User Id" } if (!tx.requireUser) { userPattern.optional = true; } var TransactionSchema = new SimpleSchema({ "context": { type:Object, label:"Context", blackbox:true, optional:true }, "description": { type:String, label:"Description" }, "items": { type:[Object], label:"Items", blackbox:true, optional:true }, "lastModified": { type:Date, label:"Timestamp" }, "undone": { type:Date, label:"Undone", optional:true }, "user_id": userPattern, "expired": { type:Boolean, label:"Expired", optional:true }, "state": { type:String, label:"state" } }); tx.Transactions.attachSchema(TransactionSchema); } } }); });
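// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of the package). It assumes an
// app-defined collection called `Posts`; everything else is the public API
// defined above (start, insert, update, remove, commit, undo, redo and the
// {tx: true} option handled by the wrapped Mongo.Collection write methods).
//
//   tx.start('edit a post');
//   var postId = tx.insert(Posts, {title: 'Draft'}, {instant: true});
//   tx.update(Posts, postId, {$set: {title: 'Final title'}});
//   tx.commit(function (err, newIds) {
//     if (!err) {
//       tx.undo();   // reverses the whole committed transaction...
//       tx.redo();   // ...and reapplies it
//     }
//   });
//
//   // Alternatively, a single write can open and commit an auto-transaction
//   // by passing {tx: true} to an ordinary collection write:
//   Posts.remove({_id: postId}, {tx: true});
// ---------------------------------------------------------------------------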
Completed refactor and fixed logging
lib/transactions-common.js
Completed refactor and fixed logging
<ide><path>ib/transactions-common.js <ide> // [BOTH] <ide> <ide> this.log = function () { if (this.logging) { _.each(arguments, function (message) { console.log(message); }) } }; <add> <add> <add> // To show the connection in the logging from `Transact` instances, set `tx.showConnection = true;` <add> // Useful for debugging <add> <add> this.showConnection = false; <ide> <ide> <ide> // Because most/many db writes will come through the transaction manager, this is a good place to do some permission checking <ide> 'rollback', <ide> 'purgeIncomplete', <ide> 'undo', <del> 'redo' <add> 'redo', <add> 'setContext', <add> 'mergeContext', <add> 'setContextPathValue', <add> 'getContext', <add> 'transactionStarted' <ide> ] <ide> <ide> // ***************************************************************************************************** <ide> <ide> Transact.prototype.start = function (description, options) { <ide> if (tx.requireUser && !Meteor.userId()) { <del> tx.log('User must be logged in to start a transaction.'); <add> this.log('User must be logged in to start a transaction.'); <ide> this._cleanReset(); <ide> return; <ide> } <ide> } <ide> <ide> this._transaction_id = Random.id(); // Transactions.insert({user_id:Meteor.userId(),timestamp:(ServerTime.date()).getTime(),description:description}); <del> tx.log('Started "' + description + '" with transaction_id: ' + this._transaction_id + ((this._autoTransaction) ? ' (auto started)' : '')); <add> this.log('Started "' + description + '" with transaction_id: ' + this._transaction_id + ((this._autoTransaction) ? ' (auto started)' : '')); <ide> return this._transaction_id; <ide> } <ide> else { <del> tx.log('An attempt to start a transaction ("' + description + '") was made when a transaction was already open. Open transaction_id: ' + this._transaction_id); <add> this.log('An attempt to start a transaction ("' + description + '") was made when a transaction was already open. 
Open transaction_id: ' + this._transaction_id); <ide> if ((tx.forceCommitBeforeStart && !(options && (options.useExistingTransaction || options.forceCommitBeforeStart === false))) || (options && options.forceCommitBeforeStart)) { <ide> // null parameter to force the commit <ide> // last parameter starts a new transaction after the commit <ide> return false; <ide> } <ide> else { <del> tx.log('Using existing transaction'); <add> this.log('Using existing transaction'); <ide> return this._transaction_id; <ide> } <ide> } <ide> Transact.prototype.commit = function (txid, callback, newId, startNewTransaction) { <ide> var self = this; <ide> if (tx.requireUser && !Meteor.userId()) { <del> tx.log('User must be logged in to commit a transaction.'); <add> self.log('User must be logged in to commit a transaction.'); <ide> this._callback(txid, callback, new Meteor.Error('user-required','No user logged in.'), false); <ide> return; <ide> } <ide> this._lastTransactionData.transaction_id = this._transaction_id; <ide> if (!this._transaction_id) { <ide> this._cleanReset(); <del> tx.log("Commit reset transaction to clean state"); <add> self.log("Commit reset transaction to clean state"); <ide> this._callback(txid, callback, new Meteor.Error('no-transactions-open', 'No transaction open.'), false); <ide> return; <ide> } <ide> // reduce the number of startAttempts by one, fire the callback with false and return <ide> if ((_.isString(txid) && txid === this._transaction_id) || txid === null) { <ide> // Force commit now <del> tx.log("Forced commit"); <add> self.log("Forced commit"); <ide> } <ide> else if (this._startAttempts > 0) { <ide> this._startAttempts--; <ide> if (_.isEmpty(this._items)) { <ide> // Don't record the transaction if nothing happened <ide> // Transactions.remove({_id:this._transaction_id}); <del> tx.log('Empty transaction: ' + this._transaction_id); <add> self.log('Empty transaction: ' + this._transaction_id); <ide> } <ide> if (this._rollback) { <ide> // One or more permissions failed or the transaction was cancelled, don't process the execution stack <ide> return; <ide> } <ide> else { <del> tx.log('Beginning commit with transaction_id: ' + this._transaction_id); <add> self.log('Beginning commit with transaction_id: ' + this._transaction_id); <ide> var doRollback = function (err) { <del> tx.log("Rolling back changes"); <add> self.log("Rolling back changes"); <ide> self.rollback(); <ide> self._callback(txid, callback, new Meteor.Error('error', 'An error occurred, so transaction was rolled back.', err), false); <ide> } <ide> return memo; <ide> }, {}); <ide> self._cleanReset(); <del> tx.log("Commit reset transaction manager to clean state"); <add> self.log("Commit reset transaction manager to clean state"); <ide> self._callback(txid, callback, null, newIds || true); <ide> if (_.isObject(startNewTransaction)) { <ide> self.start(startNewTransaction.description, startNewTransaction.options); <ide> var result = self._processTransaction(this._transaction_id, this._description, this._items, this._context, cannotOverridePermissionCheck); <ide> } <ide> catch (err) { <del> // tx.log(err); <add> // self.log(err); <ide> } <ide> if (!result) { <ide> self._callback(txid, callback, new Meteor.Error('error', 'An error occurred, so transaction was rolled back.'), false); <ide> Meteor.call("_meteorTransactionsProcess", this._transaction_id, this._description, this._items, this._context, function (err, res) { <ide> if (err || !res) { <ide> if (err) { <del> // tx.log(err); <add> // self.log(err); <ide> } <ide> 
self._callback(txid, callback, new Meteor.Error('error','An error occurred, so transaction was rolled back.', err), false); <ide> return; <ide> /*Transactions.update({_id:this._transaction_id}, {$set:_.extend({context:this._context}, {items:this._items})});*/ <ide> } <ide> catch (err) { <del> tx.log(err); <add> self.log(err); <ide> doRollback(err); <ide> return; <ide> } <ide> // This was soft deleted, we need to remove the deleted field <ide> tx.collectionIndex[obj.collection].update({_id: obj._id}, {$unset: {deleted: 1, transaction_id: self._transaction_id}}); <ide> } <del> tx.log('Rolled back remove'); <add> self.log('Rolled back remove'); <ide> } <ide> catch (err) { <del> tx.log(err); <add> self.log(err); <ide> error = true; <ide> } <ide> } <ide> operation[obj.inverse.command] = self._unpackageForUpdate(obj.inverse.data); // console.log(operation); <ide> try { <ide> tx.collectionIndex[obj.collection].update({_id: obj._id}, operation); <del> tx.log('Rolled back update'); <add> self.log('Rolled back update'); <ide> } <ide> catch (err) { <del> tx.log(err); <add> self.log(err); <ide> error = true; <ide> } <ide> } <ide> sel.transaction_id = self._transaction_id; <ide> try { <ide> tx.collectionIndex[obj.collection].remove(sel); <del> tx.log('Rolled back insert'); <add> self.log('Rolled back insert'); <ide> } <ide> catch (err) { <del> tx.log(err); <add> self.log(err); <ide> error = true; <ide> } <ide> } <ide> } <ide> }); <ide> if (error) { <del> tx.log("Rollback failed -- you'll need to check your database manually for corrupted records."); <del> tx.log("Here is a log of the actions that were tried and their inverses:"); <del> tx.log("(it was probably one of the inverse actions that caused the problem here)"); <del> tx.log(EJSON.stringify(items, null, 2)); <add> self.log("Rollback failed -- you'll need to check your database manually for corrupted records."); <add> self.log("Here is a log of the actions that were tried and their inverses:"); <add> self.log("(it was probably one of the inverse actions that caused the problem here)"); <add> self.log(EJSON.stringify(items, null, 2)); <ide> } <ide> // Server only <ide> // Client can't change the transactions collection directly anyway <ide> if (rollbackAllDoneItems) { <ide> Transactions.remove({_id: this._transaction_id}); <ide> } <del> tx.log('Incomplete transaction removed: ' + this._transaction_id); <add> self.log('Incomplete transaction removed: ' + this._transaction_id); <ide> } <ide> else { <ide> if (!Transactions.findOne({_id: this._transaction_id})) { <ide> var transactionRecord = {_id: this._transaction_id, user_id: tx._userOrNull(), description: this._description, items: items, context: this._context, lastModified: ServerTime.date(), state: "rolledBack"}; <ide> Transactions.insert(transactionRecord, function (err, res) { <ide> if (err) { <del> tx.log('No database record for transaction:', self._transaction_id); <add> self.log('No database record for transaction:', self._transaction_id); <ide> } <ide> }); <ide> } <ide> } <ide> } <ide> self._cleanReset(); <del> tx.log("Rollback reset transaction manager to clean state"); <add> self.log("Rollback reset transaction manager to clean state"); <ide> } <ide> <ide> /** <ide> this._recordTransaction(item); <ide> self._pushToRecord("insert", collection, newId, {doc: newDoc}, true, self._permissionCheckOverridden(opt)); // true is to mark this as an instant change <ide> this._closeAutoTransaction(opt, callback, newId); <del> tx.log("Executed instant insert"); <add> self.log("Executed instant 
insert"); <ide> return newId; <ide> } <ide> catch(err) { <del> tx.log(err); <del> tx.log("Rollback initiated by instant insert command"); <add> self.log(err); <add> self.log("Rollback initiated by instant insert command"); <ide> this._rollback = true; <ide> this._rollbackReason = 'insert-error'; <ide> } <ide> var newId = newDoc._id || Random.id(); <ide> // var newId = self._doInsert(collection,_.extend(newDoc,{transaction_id:self._transaction_id}),opt,callback); <ide> self._pushToRecord("insert", collection, newId, {doc: _.extend(newDoc, {_id: newId, transaction_id: self._transaction_id})}, false, self._permissionCheckOverridden(opt)); <del> tx.log("Pushed insert command to stack: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')' <add> self.log("Pushed insert command to stack: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')' <ide> this._closeAutoTransaction(opt, callback); <ide> return newId; <ide> } <ide> else { <ide> this._rollback = true; <ide> this._rollbackReason = 'permission-denied'; <del> tx.log("Insufficient permissions to insert this document into " + collection._name + ':', newDoc); // Permission to insert not granted <add> this.log("Insufficient permissions to insert this document into " + collection._name + ':', newDoc); // Permission to insert not granted <ide> return; <ide> } <ide> <ide> var _id = (_.isObject(doc)) ? doc._id : doc; <ide> var existingDoc = collection.findOne({_id: _id}); // (!_.isObject(doc)) ? collection.findOne({_id: doc}) : doc; // , {transform: null} <ide> if (!(_id && existingDoc)) { <del> tx.log('No document found. Make sure you provide an _id field for a document that exists. You passed: ' + JSON.stringify(doc)); <add> this.log('No document found. Make sure you provide an _id field for a document that exists. 
You passed: ' + JSON.stringify(doc)); <ide> } <ide> if (this._permissionCheckOverridden(opt) || this._permissionCheck("remove", collection, existingDoc, {})) { <ide> var self = this; <ide> if (opt && opt.instant) { <ide> try { <ide> self._doRemove(collection, _id, sel, true, opt, callback); <del> tx.log("Executed instant remove"); <add> self.log("Executed instant remove"); <ide> } <ide> catch(err) { <del> tx.log(err); <del> tx.log("Rollback initiated by instant remove command"); <add> self.log(err); <add> self.log("Rollback initiated by instant remove command"); <ide> this._rollback = true; <ide> this._rollbackReason = 'remove-error'; <ide> } <ide> } <ide> else { <ide> self._doRemove(collection, _id, sel, false, opt, callback); <del> tx.log("Pushed remove command to stack: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')' <add> self.log("Pushed remove command to stack: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')' <ide> } <ide> this._closeAutoTransaction(opt, callback); <ide> return !this._rollback; // Remove was executed or queued for execution <ide> else { <ide> this._rollback = true; <ide> this._rollbackReason = 'permission-denied'; <del> tx.log("Insufficient permissions to remove this document from " + collection._name + ':', existingDoc); // Permission to remove not granted <add> this.log("Insufficient permissions to remove this document from " + collection._name + ':', existingDoc); // Permission to remove not granted <ide> return; <ide> } <ide> } <ide> // but potential buggy behaviour if a partial doc is passed and the field being updated <ide> // isn't in it and it's a $set command and so the inverse is wrongly taken to be $unset <ide> if (!(_id && existingDoc)) { <del> tx.log('No document found. Make sure you provide an _id field for a document that exists. You passed: ' + JSON.stringify(doc)); <add> self.log('No document found. Make sure you provide an _id field for a document that exists. You passed: ' + JSON.stringify(doc)); <ide> } <ide> if (this._permissionCheckOverridden(opt) || this._permissionCheck("update", collection, existingDoc, updates)) { <ide> this._openAutoTransaction(opt && opt.description || 'update ' + collection._name.slice(0, - 1)); <ide> if (opt && opt.instant) { <ide> try { <ide> self._doUpdate(collection, _id, updates, updateData, inverse, true, opt, callback, (i === (actionFieldsCount - 1)) ? true : false); <del> tx.log("Executed instant update"); // true param is to record this as an instant change <add> self.log("Executed instant update"); // true param is to record this as an instant change <ide> } <ide> catch(err) { <del> tx.log(err); <del> tx.log("Rollback initiated by instant update command"); <add> self.log(err); <add> self.log("Rollback initiated by instant update command"); <ide> this._rollback = true; <ide> this._rollbackReason = 'update-error'; <ide> } <ide> else { <ide> (function (updateData, inverse, execute) { // console.log('updateData, inverse, execute:', updateData, inverse, execute); <ide> self._doUpdate(collection, _id, updates, updateData, inverse, false, opt, callback, execute); <del> tx.log("Pushed update command to stack: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')' <add> self.log("Pushed update command to stack: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')' <ide> }).call(this, updateData, inverse, (i === (actionFieldsCount - 1)) ? 
true : false); <ide> } <ide> } <ide> else { <ide> this._rollback = true; <ide> this._rollbackReason = 'permission-denied'; <del> tx.log("Insufficient permissions to update this document in " + collection._name + ':', existingDoc); // Permission to update not granted <add> self.log("Insufficient permissions to update this document in " + collection._name + ':', existingDoc); // Permission to update not granted <ide> return; <ide> } <ide> } <ide> */ <ide> <ide> Transact.prototype.cancel = function () { <del> tx.log('Transaction cancelled'); <add> this.log('Transaction cancelled'); <ide> this._rollback = true; <ide> this._rollbackReason = 'transaction-cancelled'; <ide> } <ide> // ********************************************************** <ide> // INTERNAL METHODS - NOT INTENDED TO BE CALLED FROM APP CODE <ide> // ********************************************************** <add> <add>Transact.prototype.log = function (message) { <add> tx.log(((tx.showConnection) ? '[Connection: ' + this._connectionId + '] ' : '') + message); <add>} <ide> <ide> Transact.prototype._doInsert = function (collection, newDoc, opt, callback) { <ide> // The following is a very sketchy attempt to support collection2 options <ide> if (!fullDoc) { <ide> // There is no existing document to remove <ide> // Return without creating an item in the queue or recording it <del> tx.log("Document not found for removal:", sel); <add> self.log("Document not found for removal:", sel); <ide> } <ide> else { <ide> fields.doc.transaction_id = self._transaction_id; <ide> <ide> Transact.prototype._closeAutoTransaction = function (opt, callback, newId) {// console.log("Auto commit check value for autoTransaction: " + this._autoTransaction + ' (Auto: ' + this._autoTransaction + ')'); <ide> if (this._autoTransaction) { <del> tx.log("Auto committed: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')'; <add> this.log("Auto committed: " + this._transaction_id); // + ' (Auto: ' + this._autoTransaction + ')'; <ide> this.commit(opt, undefined, newId); <ide> } <ide> } <ide> // but we need to sidestep the rule that timers can't be <ide> // started in simulations (which is what Meteor.setTimeout checks for) <ide> var onTimeOut = function () { <del> tx.log('Transaction (' + self._transaction_id + ') was cancelled after being inactive for ' + (tx.idleTimeout / 1000) + ' seconds.'); <add> self.log('Transaction (' + self._transaction_id + ') was cancelled after being inactive for ' + (tx.idleTimeout / 1000) + ' seconds.'); <ide> self.rollback(); <ide> }; <ide> this._autoCancel = (Meteor.isServer) ? 
Meteor.setTimeout(onTimeOut, tx.idleTimeout) : setTimeout(onTimeOut, tx.idleTimeout); <ide> var success = true; <ide> var self = this; <ide> var logErrors = function (err) { <del> tx.log(err.toString()); <add> self.log(err.toString()); <ide> }; <ide> var updateCache = {}; <ide> var cacheValues = function (item, index) { <ide> <ide> // The insert succeeded <ide> items[index].state = 'done'; <del> tx.log("Executed insert"); <add> self.log("Executed insert"); <ide> <ide> break; <ide> case 'update' : <ide> cacheValues(item, index); <ide> } <ide> items[index].state = 'done'; <del> tx.log("Executed update"); <add> self.log("Executed update"); <ide> <ide> break; <ide> case 'remove' : <ide> <ide> // The remove succeeded <ide> items[index].state = 'done'; <del> tx.log('Executed remove'); <add> self.log('Executed remove'); <ide> } <ide> else { <ide> // Just do a soft delete <ide> <ide> // The remove succeeded <ide> items[index].state = 'done'; <del> tx.log('Executed remove'); <add> self.log('Executed remove'); <ide> <ide> } <ide> break; <ide> var self = this; <ide> Transactions.update({_id: txid}, {$set: {state: "done", lastModified: ServerTime.date()}}, function (err, res) { <ide> if (err) { <del> tx.log('Could not complete transaction:', txid, err); <add> self.log('Could not complete transaction:', txid, err); <ide> success = false; <ide> } <ide> }); <ide> } <ide> } <ide> else { <del> tx.log('Transaction failed'); <add> self.log('Transaction failed'); <ide> // Need to run the items through a rollback with actual inverse writes <ide> var rollbackAllDoneItems = true; <ide> tx._transaction_id = txid;
Java
apache-2.0
4da12cf36d885e9d3a5247ece49f32fc173ea0e0
0
TorosyanV/shop,TorosyanV/shop,TorosyanV/shop,TorosyanV/shop
/* This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.shop; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @SpringBootApplication public class TortApplication { public static void main(String[] args) { SpringApplication.run(TortApplication.class, args); } }
src/main/java/com/shop/TortApplication.java
package com.shop; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @SpringBootApplication public class TortApplication { public static void main(String[] args) { SpringApplication.run(TortApplication.class, args); } }
Update TortApplication.java
src/main/java/com/shop/TortApplication.java
Update TortApplication.java
<ide><path>src/main/java/com/shop/TortApplication.java <add>/* This program is free software: you can redistribute it and/or modify <add> it under the terms of the GNU General Public License as published by <add> the Free Software Foundation, either version 3 of the License, or <add> (at your option) any later version. <add> <add> This program is distributed in the hope that it will be useful, <add> but WITHOUT ANY WARRANTY; without even the implied warranty of <add> MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the <add> GNU General Public License for more details. <add> <add> You should have received a copy of the GNU General Public License <add> along with this program. If not, see <http://www.gnu.org/licenses/>. <add> */ <add> <ide> package com.shop; <ide> <ide> import org.springframework.boot.SpringApplication;
Java
mit
5e5480e8be5e3fe2e8d4c5e5e287bdc86eeb9dc0
0
marinator86/bbm,marinator86/bbm
package bbm; import bbm.actions.buildtrigger.BuildTriggerModule; import bbm.database.orgs.OrgModule; import bbm.database.repositories.RepositoryModule; import bbm.handlers.*; import bbm.actions.ActionModule; import bbm.database.DatabaseModule; import bbm.database.sandboxes.SandboxModule; import bbm.database.branches.BranchModule; import bbm.handlers.hooks.BitbucketWebhookHandler; import bbm.handlers.renderer.BranchesListRenderer; import bbm.handlers.renderer.RepositoryListRenderer; import bbm.handlers.renderer.RepositoryRenderer; import bbm.salesforce.SalesforceModule; import org.pac4j.http.client.direct.DirectBasicAuthClient; import org.pac4j.http.credentials.authenticator.test.SimpleTestUsernamePasswordAuthenticator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ratpack.pac4j.RatpackPac4j; import ratpack.server.BaseDir; import ratpack.server.RatpackServer; import ratpack.groovy.template.TextTemplateModule; import ratpack.guice.Guice; import ratpack.session.SessionModule; import static ratpack.groovy.Groovy.groovyTemplate; public class Main { private final static Logger logger = LoggerFactory.getLogger(Main.class); public static void main(String... args) throws Exception { RatpackServer.start(s -> s .serverConfig(c -> c .baseDir(BaseDir.find()) .env()) .registry(Guice.registry(b -> { b.module(SessionModule.class); b.module(TextTemplateModule.class, conf -> conf.setStaticallyCompile(true)); b.module(DatabaseModule.class); b.module(BuildTriggerModule.class); b.module(RepositoryModule.class); b.module(OrgModule.class); b.module(SandboxModule.class); b.module(BranchModule.class); b.module(ActionModule.class); b.module(SalesforceModule.class); b.bind(BitbucketWebhookHandler.class); b.bind(OrgActionHandler.class); b.bind(ActionRenderer.class); b.bind(RepositoryListRenderer.class); b.bind(RepositoryRenderer.class); b.bind(OptionalOrgRenderer.class); b.bind(InstructionActionHandler.class); b.bind(GetRepositoriesHandler.class); b.bind(PostRepositoryHandler.class); b.bind(DeleteRepositoryHandler.class); b.bind(GetRepositoryBranchesHandler.class); b.bind(BranchesListRenderer.class); b.bind(ErrorHandler.class); })) .handlers(chain -> { final DirectBasicAuthClient directBasicAuthClient = new DirectBasicAuthClient(new SimpleTestUsernamePasswordAuthenticator()); chain .all(RatpackPac4j.authenticator(directBasicAuthClient)) .all(RatpackPac4j.requireAuth(DirectBasicAuthClient.class)) .get(ctx -> ctx.render(groovyTemplate("index.html"))) .get("instruct/:repositoryUID/:branchName/:commit?", InstructionActionHandler.class) .prefix("admin", admin -> { admin.path("sync", OrgActionHandler.class); }) .prefix("hooks", hookChain -> { hookChain.post("bitbucket", BitbucketWebhookHandler.class); }) .prefix("repositories", repositories -> { repositories.get(GetRepositoriesHandler.class); repositories.prefix(":uuid", repository ->{ repository.delete(DeleteRepositoryHandler.class); repository.post(PostRepositoryHandler.class); repository.get("branches", GetRepositoryBranchesHandler.class); }); }) .files(f -> f.dir("public")); }) ); } }
src/main/java/bbm/Main.java
package bbm; import bbm.actions.buildtrigger.BuildTriggerModule; import bbm.database.orgs.OrgModule; import bbm.database.repositories.RepositoryModule; import bbm.handlers.*; import bbm.actions.ActionModule; import bbm.database.DatabaseModule; import bbm.database.sandboxes.SandboxModule; import bbm.database.branches.BranchModule; import bbm.handlers.hooks.BitbucketWebhookHandler; import bbm.handlers.renderer.BranchesListRenderer; import bbm.handlers.renderer.RepositoryListRenderer; import bbm.handlers.renderer.RepositoryRenderer; import bbm.salesforce.SalesforceModule; import org.pac4j.http.client.direct.DirectBasicAuthClient; import org.pac4j.http.client.indirect.FormClient; import org.pac4j.http.credentials.authenticator.test.SimpleTestUsernamePasswordAuthenticator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import ratpack.pac4j.RatpackPac4j; import ratpack.server.BaseDir; import ratpack.server.RatpackServer; import ratpack.groovy.template.TextTemplateModule; import ratpack.guice.Guice; import ratpack.session.SessionModule; import static ratpack.groovy.Groovy.groovyTemplate; import static java.util.Collections.singletonMap; public class Main { private final static Logger logger = LoggerFactory.getLogger(Main.class); public static void main(String... args) throws Exception { RatpackServer.start(s -> s .serverConfig(c -> c .baseDir(BaseDir.find()) .env()) .registry(Guice.registry(b -> { b.module(SessionModule.class); b.module(TextTemplateModule.class, conf -> conf.setStaticallyCompile(true)); b.module(DatabaseModule.class); b.module(BuildTriggerModule.class); b.module(RepositoryModule.class); b.module(OrgModule.class); b.module(SandboxModule.class); b.module(BranchModule.class); b.module(ActionModule.class); b.module(SalesforceModule.class); b.bind(BitbucketWebhookHandler.class); b.bind(OrgActionHandler.class); b.bind(ActionRenderer.class); b.bind(RepositoryListRenderer.class); b.bind(RepositoryRenderer.class); b.bind(OptionalOrgRenderer.class); b.bind(InstructionActionHandler.class); b.bind(GetRepositoriesHandler.class); b.bind(PostRepositoryHandler.class); b.bind(DeleteRepositoryHandler.class); b.bind(GetRepositoryBranchesHandler.class); b.bind(BranchesListRenderer.class); b.bind(ErrorHandler.class); })) .handlers(chain -> { final DirectBasicAuthClient directBasicAuthClient = new DirectBasicAuthClient(new SimpleTestUsernamePasswordAuthenticator()); chain .all(RatpackPac4j.authenticator(directBasicAuthClient)) .all(RatpackPac4j.requireAuth(DirectBasicAuthClient.class)) .get(ctx -> ctx.render(groovyTemplate("index.html"))) .get("instruct/:repositoryUID/:branchName/:commit?", InstructionActionHandler.class) .prefix("admin", admin -> { admin.path("sync", OrgActionHandler.class); }) .prefix("hooks", hookChain -> { hookChain.post("bitbucket", BitbucketWebhookHandler.class); }) .prefix("repositories", repositories -> { repositories.get(GetRepositoriesHandler.class); repositories.prefix(":uuid", repository ->{ repository.delete(DeleteRepositoryHandler.class); repository.post(PostRepositoryHandler.class); repository.get("branches", GetRepositoryBranchesHandler.class); }); }) .files(f -> f.dir("public")); }) ); } }
removed unused imports
src/main/java/bbm/Main.java
removed unused imports
<ide><path>src/main/java/bbm/Main.java <ide> import bbm.handlers.renderer.RepositoryRenderer; <ide> import bbm.salesforce.SalesforceModule; <ide> import org.pac4j.http.client.direct.DirectBasicAuthClient; <del>import org.pac4j.http.client.indirect.FormClient; <ide> import org.pac4j.http.credentials.authenticator.test.SimpleTestUsernamePasswordAuthenticator; <ide> import org.slf4j.Logger; <ide> import org.slf4j.LoggerFactory; <ide> import ratpack.session.SessionModule; <ide> <ide> import static ratpack.groovy.Groovy.groovyTemplate; <del>import static java.util.Collections.singletonMap; <ide> <ide> public class Main { <ide> private final static Logger logger = LoggerFactory.getLogger(Main.class);
Java
mit
4bdbdbbc0b0c9f14cdf2a643abc4b39a9dc6dd8e
0
CS2103JAN2017-W14-B2/main,CS2103JAN2017-W14-B2/main
package guitests; import static org.junit.Assert.assertTrue; import static seedu.taskboss.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT; import org.junit.Test; import seedu.taskboss.commons.core.Messages; import seedu.taskboss.logic.commands.MarkDoneCommand; import seedu.taskboss.testutil.TaskBuilder; import seedu.taskboss.testutil.TestTask; public class MarkDoneCommandTest extends TaskBossGuiTest { // The list of tasks in the task list panel is expected to match this list. // This list is updated with every successful call to assertEditSuccess(). TestTask[] expectedTasksList = td.getTypicalTasks(); @Test public void markTaskDone_success() throws Exception { int taskBossIndex = 8; TestTask markedDoneTask = new TaskBuilder().withName("Submit progress report") .withPriorityLevel("3").withStartDateTime("Feb 9, 2017 5pm") .withEndDateTime("10am Feb 24, 2017 5pm") .withInformation("notify department head") .withCategories("Done").build(); assertMarkDoneSuccess(taskBossIndex, taskBossIndex, markedDoneTask); } @Test public void markDone_findThenMarkDone_success() throws Exception { commandBox.runCommand("find n/Submit progress report"); int filteredTaskListIndex = 1; int taskBossIndex = 8; TestTask taskToMarkDone = expectedTasksList[taskBossIndex - 1]; TestTask markedDoneTask = new TaskBuilder(taskToMarkDone).withCategories("Done").build(); assertMarkDoneSuccess(filteredTaskListIndex, taskBossIndex, markedDoneTask); } @Test public void markDone_missingTaskIndex_failure() { commandBox.runCommand("done "); assertResultMessage(String.format(MESSAGE_INVALID_COMMAND_FORMAT, MarkDoneCommand.MESSAGE_USAGE)); } @Test public void markDone_invalidTaskIndex_failure() { commandBox.runCommand("done 8"); assertResultMessage(Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX); } private void assertMarkDoneSuccess(int filteredTaskListIndex, int taskBossIndex, TestTask markedDoneTask) { commandBox.runCommand("done " + filteredTaskListIndex); // confirm the list now contains all previous tasks plus the task with updated details expectedTasksList[taskBossIndex - 1] = markedDoneTask; assertTrue(taskListPanel.isListMatching(expectedTasksList)); assertResultMessage(String.format(MarkDoneCommand.MESSAGE_MARK_TASK_DONE_SUCCESS , markedDoneTask)); } }
src/test/java/guitests/MarkDoneCommandTest.java
package guitests; import static org.junit.Assert.assertTrue; import static seedu.taskboss.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT; import org.junit.Test; import seedu.taskboss.commons.core.Messages; import seedu.taskboss.logic.commands.MarkDoneCommand; import seedu.taskboss.testutil.TaskBuilder; import seedu.taskboss.testutil.TestTask; public class MarkDoneCommandTest extends TaskBossGuiTest { // The list of tasks in the task list panel is expected to match this list. // This list is updated with every successful call to assertEditSuccess(). TestTask[] expectedTasksList = td.getTypicalTasks(); @Test public void markTaskDone_success() throws Exception { int taskBossIndex = 8; TestTask markedDoneTask = new TaskBuilder().withName("Submit progress report") .withPriorityLevel("3").withStartDateTime("Feb 9, 2017 5pm") .withEndDateTime("10am Feb 24, 2017 5pm") .withInformation("notify department head") .withCategories("Done").build(); assertMarkDoneSuccess(taskBossIndex, taskBossIndex, markedDoneTask); } @Test public void markDone_findThenMarkDone_success() throws Exception { commandBox.runCommand("find n/progress"); int filteredTaskListIndex = 1; int taskBossIndex = 8; TestTask taskToMarkDone = expectedTasksList[taskBossIndex - 1]; TestTask markedDoneTask = new TaskBuilder(taskToMarkDone).withCategories("Done").build(); assertMarkDoneSuccess(filteredTaskListIndex, taskBossIndex, markedDoneTask); } @Test public void markDone_missingTaskIndex_failure() { commandBox.runCommand("done "); assertResultMessage(String.format(MESSAGE_INVALID_COMMAND_FORMAT, MarkDoneCommand.MESSAGE_USAGE)); } @Test public void markDone_invalidTaskIndex_failure() { commandBox.runCommand("done 8"); assertResultMessage(Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX); } private void assertMarkDoneSuccess(int filteredTaskListIndex, int taskBossIndex, TestTask markedDoneTask) { commandBox.runCommand("done " + filteredTaskListIndex); // confirm the list now contains all previous tasks plus the task with updated details expectedTasksList[taskBossIndex - 1] = markedDoneTask; assertTrue(taskListPanel.isListMatching(expectedTasksList)); assertResultMessage(String.format(MarkDoneCommand.MESSAGE_MARK_TASK_DONE_SUCCESS , markedDoneTask)); } }
solve travis error
src/test/java/guitests/MarkDoneCommandTest.java
solve travis error
<ide><path>src/test/java/guitests/MarkDoneCommandTest.java <ide> <ide> @Test <ide> public void markDone_findThenMarkDone_success() throws Exception { <del> commandBox.runCommand("find n/progress"); <add> commandBox.runCommand("find n/Submit progress report"); <ide> <ide> int filteredTaskListIndex = 1; <ide> int taskBossIndex = 8;
Java
apache-2.0
cdab79009768ff489751c2ef9a8bd847be910d74
0
twak/siteplan
package org.twak.siteplan.jme; import java.awt.Color; import java.nio.FloatBuffer; import java.util.ArrayList; import java.util.List; import javax.vecmath.Matrix4d; import javax.vecmath.Point2d; import javax.vecmath.Point3d; import javax.vecmath.Tuple3d; //import javax.vecmath.Vector2f; //import javax.vecmath.Vector3f; import javax.vecmath.Vector3d; import org.twak.utils.collections.Arrayz; import org.twak.utils.collections.Loop; import org.twak.utils.collections.LoopL; import org.twak.utils.collections.Loopable; import org.twak.utils.collections.Loopz; import org.twak.utils.geom.Line3d; import org.twak.utils.geom.ObjDump; import com.jme3.app.SimpleApplication; import com.jme3.asset.AssetManager; import com.jme3.material.MatParam; import com.jme3.material.Material; import com.jme3.math.ColorRGBA; import com.jme3.math.Matrix4f; import com.jme3.math.Transform; import com.jme3.math.Vector2f; import com.jme3.math.Vector3f; import com.jme3.scene.Geometry; import com.jme3.scene.Mesh; import com.jme3.scene.Mesh.Mode; import com.jme3.scene.Node; import com.jme3.scene.Spatial; import com.jme3.scene.VertexBuffer; import com.jme3.scene.VertexBuffer.Type; import com.jme3.scene.mesh.IndexBuffer; import com.jme3.scene.shape.Box; import com.jme3.texture.Texture2D; import com.jme3.util.BufferUtils; public class Jme3z { public static final Vector3f UP = new Vector3f(0,1,0); public static Matrix4f toJme( Matrix4d m ) { return new Matrix4f( (float) m.m00, (float) m.m01, (float) m.m02, (float) m.m03, (float) m.m10, (float) m.m11, (float) m.m12, (float) m.m13, (float) m.m20, (float) m.m21, (float) m.m22, (float) m.m23, (float) m.m30, (float) m.m31, (float) m.m32, (float) m.m33 ); } public static Transform toJmeTransform( Matrix4d m ) { Transform out = new Transform(); out.fromTransformMatrix( toJme( m ) ); return out; } public static Matrix4d fromMatrix( Matrix4f m ) { return new Matrix4d( m.m00, m.m01, m.m02, m.m03, m.m10, m.m11, m.m12, m.m13, m.m20, m.m21, m.m22, m.m23, m.m30, m.m31, m.m32, m.m33 ); } public static Mesh fromLoop( Loop<Point3d> in ) { if ( !in.holes.isEmpty() ) { LoopL<Point3d> res =new LoopL<Point3d>( in ); in = res.isEmpty() ? 
in : res.get( 0 ); } Mesh m = new Mesh(); m.setMode( Mesh.Mode.Triangles ); List<Integer> inds = new ArrayList<>(); List<Float> pos = new ArrayList<>(); List<Float> norms = new ArrayList<>(); Loopz.triangulate( in, false, inds, pos, norms ); m.setBuffer( Type.Index, 3, Arrayz.toIntArray( inds ) ); m.setBuffer( Type.Position, 3, Arrayz.toFloatArray( pos ) ); m.setBuffer( Type.Normal, 3, Arrayz.toFloatArray( norms ) ); return m; } public static Spatial fromLoop( AssetManager am, LoopL<Point2d> gis, double h ) { Node out = new Node(); for ( Loop<Point2d> loop : gis ) { Mesh m = new Mesh(); m.setMode( Mesh.Mode.Lines ); List<Float> coords = new ArrayList<>(); List<Integer> inds = new ArrayList<>(); for ( Loopable<Point2d> ll : loop.loopableIterator() ) { inds.add( inds.size() ); inds.add( inds.size() ); Point3d a = new Point3d( ll.get().x, h, ll.get().y ), b = new Point3d( ll.getNext().get().x, h, ll.getNext().get().y ); coords.add( (float) a.x ); coords.add( (float) a.y ); coords.add( (float) a.z ); coords.add( (float) b.x ); coords.add( (float) b.y ); coords.add( (float) b.z ); { Box box1 = new Box( 0.5f, 0.5f, 0.5f ); Geometry geom = new Geometry( "Box", box1 ); Material mat1 = new Material( am, "Common/MatDefs/Misc/Unshaded.j3md" ); mat1.setColor( "Color", ColorRGBA.Magenta ); geom.setMaterial( mat1 ); geom.setLocalTranslation( Jme3z.toJmeVec( a ) ); out.attachChild( geom ); } } m.setBuffer( VertexBuffer.Type.Position, 3, Arrayz.toFloatArray( coords ) ); m.setBuffer( VertexBuffer.Type.Index, 2, Arrayz.toIntArray( inds ) ); Geometry geom = new Geometry( "profile", m ); Material lineMaterial = new Material( am, "Common/MatDefs/Misc/Unshaded.j3md" ); lineMaterial.getAdditionalRenderState().setLineWidth( 3 ); lineMaterial.setColor( "Color", ColorRGBA.Pink ); geom.setMaterial( lineMaterial ); out.attachChild( geom ); } return out; } public static com.jme3.math.Vector3f toJmeVec( Tuple3d a ) { return new com.jme3.math.Vector3f( (float) a.x, (float) a.y, (float) a.z ); } public static Spatial lines( AssetManager am, List<Line3d> roofLines, ColorRGBA color, float width, boolean cuboid ) { Mesh m; Geometry geom; if ( cuboid ) { MeshBuilder mb = new MeshBuilder(); for ( Line3d l : roofLines ) mb.solidLine( l, width ); m = mb.getMesh(); geom = new Geometry( "3d lines", m ); Material mat = new Material( am, "Common/MatDefs/Light/Lighting.j3md" ); mat.setColor( "Diffuse", color ); mat.setColor( "Ambient", color.mult( 0.5f ) ); mat.setBoolean( "UseMaterialColors", true ); geom.setMaterial( mat ); } else { m = new Mesh(); m.setMode( Mesh.Mode.Lines ); List<Float> coords = new ArrayList<>(); List<Integer> inds = new ArrayList<>(); for ( Line3d line : roofLines ) { inds.add( inds.size() ); inds.add( inds.size() ); coords.add( (float) line.start.x ); coords.add( (float) line.start.y ); coords.add( (float) line.start.z ); coords.add( (float) line.end.x ); coords.add( (float) line.end.y ); coords.add( (float) line.end.z ); } m.setBuffer( VertexBuffer.Type.Position, 3, Arrayz.toFloatArray( coords ) ); m.setBuffer( VertexBuffer.Type.Index, 2, Arrayz.toIntArray( inds ) ); geom = new Geometry( "jmez lines", m ); Material lineMaterial = new Material( am, "Common/MatDefs/Misc/Unshaded.j3md" ); lineMaterial.getAdditionalRenderState().setLineWidth( width ); lineMaterial.setColor( "Color", color == null ? 
ColorRGBA.Pink : color ); geom.setMaterial( lineMaterial ); } geom.updateGeometricState(); geom.updateModelBound(); return geom; } public static void toObj( Mesh m, ObjDump dump, Transform transform ) { // todo: normals float[][] verts = new float[3][3]; Vector3f a = new Vector3f(), b = new Vector3f(), c = new Vector3f(); Matrix4f mat = transform.toTransformMatrix(); VertexBuffer pb = m.getBuffer( Type.Position ); VertexBuffer ub = m.getBuffer( Type.TexCoord ); IndexBuffer ib = m.getIndicesAsList(); FloatBuffer fpb = (FloatBuffer) pb.getData(), ubp = null; float[][] uvs = null; if ( ub != null && ub.getNumComponents() == 2 && ub.getNumElements() == pb.getNumElements() ) { ubp = (FloatBuffer) ub.getData(); uvs = new float[3][2]; } Vector3f v1 = new Vector3f(), v2 = new Vector3f(), v3 = new Vector3f(); Vector2f u1 = new Vector2f(), u2 = new Vector2f(), u3 = new Vector2f(); for ( int t = 0; t < m.getTriangleCount(); t++ ) { try { int vertIndex = t * 3; int vert1 = ib.get( vertIndex ); int vert2 = ib.get( vertIndex + 1 ); int vert3 = ib.get( vertIndex + 2 ); BufferUtils.populateFromBuffer( v1, fpb, vert1 ); BufferUtils.populateFromBuffer( v2, fpb, vert2 ); BufferUtils.populateFromBuffer( v3, fpb, vert3 ); a = mat.mult( v1 ); b = mat.mult( v2 ); c = mat.mult( v3 ); verts[ 0 ][ 0 ] = a.x; verts[ 0 ][ 1 ] = a.y; verts[ 0 ][ 2 ] = a.z; verts[ 1 ][ 0 ] = b.x; verts[ 1 ][ 1 ] = b.y; verts[ 1 ][ 2 ] = b.z; verts[ 2 ][ 0 ] = c.x; verts[ 2 ][ 1 ] = c.y; verts[ 2 ][ 2 ] = c.z; if ( uvs != null ) { BufferUtils.populateFromBuffer( u1, ubp, vert1 ); BufferUtils.populateFromBuffer( u2, ubp, vert2 ); BufferUtils.populateFromBuffer( u3, ubp, vert3 ); uvs[ 0 ][ 0 ] = u1.x; uvs[ 0 ][ 1 ] = u1.y; uvs[ 1 ][ 0 ] = u2.x; uvs[ 1 ][ 1 ] = u2.y; uvs[ 2 ][ 0 ] = u3.x; uvs[ 2 ][ 1 ] = u3.y; } dump.addFace( verts, uvs, null ); } catch ( Throwable th ) { th.printStackTrace(); } } } public static String MAT_KEY = "material"; public static void dump( ObjDump dump, Spatial spat, int i ) { if ( spat instanceof Node ) { // int c = 0; for ( Spatial s : ( (Node) spat ).getChildren() ) { Color color = null; String texture = null; if (s instanceof Geometry) { MatParam mp = ( (Geometry) s ).getMaterial().getParam( "Diffuse" ); if (mp != null) { ColorRGBA gCol = (ColorRGBA) mp.getValue(); if (gCol != null) color = new Color(gCol.getRed(), gCol.getGreen(), gCol.getBlue()); } mp = ( (Geometry) s ).getMaterial().getParam( "DiffuseMap" ); if (mp != null) texture = ((Texture2D) mp.getValue() ).getName(); } if (color != null) { if (texture != null) dump.setCurrentTexture( texture, s.getUserData( MAT_KEY ), color, 0.2 ); else dump.setCurrentMaterial( s.getUserData( MAT_KEY ), color, 0.2 ); } dump( dump, s, i+1 ); } } else if ( spat instanceof Geometry ) { Mesh m = ( (Geometry) spat ).getMesh(); if (m.getMode() == Mode.Lines || m.getMode() == Mode.LineLoop || m.getMode() == Mode.LineStrip ) return; Jme3z.toObj( m, dump, ((Geometry) spat).getLocalTransform() ); } } public static Vector3f to( Tuple3d l ) { return new Vector3f( (float) l.x, (float) l.y, (float) l.z ); } public static Vector3f toJme( javax.vecmath.Vector3f l ) { return new Vector3f( l.x, l.y, l.z); } public static Vector3f toJmeV( double x, double y, double z ) { return new Vector3f( (float)x, (float) y, (float) z ); } public static Vector3d from( com.jme3.math.Vector3f dir ) { return new Vector3d( dir.x, dir.y, dir.z); } public static void removeAllChildren( Node debug ) { for (Spatial s : debug.getChildren() ) s.removeFromParent(); } public static ColorRGBA toJme(Color c) { 
return new ColorRGBA( c.getRed() / 255f, c.getGreen() / 255f, c.getBlue() / 255f, c.getAlpha() / 255f ); } public static Material lit( SimpleApplication sa, double r, double g, double b ) { Material mat = new Material( sa.getAssetManager(), "Common/MatDefs/Light/Lighting.j3md" ); mat.setColor( "Diffuse", new ColorRGBA( (float) r, (float) g, (float) b, 1 ) ); mat.setColor( "Ambient", new ColorRGBA( (float) r, (float) g, (float) b, 1 ) ); mat.setBoolean( "UseMaterialColors", true ); return mat; } public static Point2d to2( Vector3f b ) { return new Point2d( b.x, b.z ); } public static boolean isLine( Mode mode ) { return mode == Mode.LineLoop || mode == Mode.Lines || mode == Mode.LineStrip; } }
src/org/twak/siteplan/jme/Jme3z.java
package org.twak.siteplan.jme; import java.awt.Color; import java.nio.FloatBuffer; import java.util.ArrayList; import java.util.List; import javax.vecmath.Matrix4d; import javax.vecmath.Point2d; import javax.vecmath.Point3d; import javax.vecmath.Tuple3d; //import javax.vecmath.Vector2f; //import javax.vecmath.Vector3f; import javax.vecmath.Vector3d; import org.twak.utils.collections.Arrayz; import org.twak.utils.collections.Loop; import org.twak.utils.collections.LoopL; import org.twak.utils.collections.Loopable; import org.twak.utils.collections.Loopz; import org.twak.utils.geom.Line3d; import org.twak.utils.geom.ObjDump; import com.jme3.app.SimpleApplication; import com.jme3.asset.AssetManager; import com.jme3.material.MatParam; import com.jme3.material.Material; import com.jme3.math.ColorRGBA; import com.jme3.math.Matrix4f; import com.jme3.math.Transform; import com.jme3.math.Vector2f; import com.jme3.math.Vector3f; import com.jme3.scene.Geometry; import com.jme3.scene.Mesh; import com.jme3.scene.Mesh.Mode; import com.jme3.scene.Node; import com.jme3.scene.Spatial; import com.jme3.scene.VertexBuffer; import com.jme3.scene.VertexBuffer.Type; import com.jme3.scene.mesh.IndexBuffer; import com.jme3.scene.shape.Box; import com.jme3.texture.Texture2D; import com.jme3.util.BufferUtils; public class Jme3z { public static final Vector3f UP = new Vector3f(0,1,0); public static Matrix4f toJme( Matrix4d m ) { return new Matrix4f( (float) m.m00, (float) m.m01, (float) m.m02, (float) m.m03, (float) m.m10, (float) m.m11, (float) m.m12, (float) m.m13, (float) m.m20, (float) m.m21, (float) m.m22, (float) m.m23, (float) m.m30, (float) m.m31, (float) m.m32, (float) m.m33 ); } public static Transform toJmeTransform( Matrix4d m ) { Transform out = new Transform(); out.fromTransformMatrix( toJme( m ) ); return out; } public static Matrix4d fromMatrix( Matrix4f m ) { return new Matrix4d( m.m00, m.m01, m.m02, m.m03, m.m10, m.m11, m.m12, m.m13, m.m20, m.m21, m.m22, m.m23, m.m30, m.m31, m.m32, m.m33 ); } public static Mesh fromLoop( Loop<Point3d> in ) { if ( !in.holes.isEmpty() ) { LoopL<Point3d> res =new LoopL<Point3d>( in ); in = res.isEmpty() ? 
in : res.get( 0 ); } Mesh m = new Mesh(); m.setMode( Mesh.Mode.Triangles ); List<Integer> inds = new ArrayList<>(); List<Float> pos = new ArrayList<>(); List<Float> norms = new ArrayList<>(); Loopz.triangulate( in, false, inds, pos, norms ); m.setBuffer( Type.Index, 3, Arrayz.toIntArray( inds ) ); m.setBuffer( Type.Position, 3, Arrayz.toFloatArray( pos ) ); m.setBuffer( Type.Normal, 3, Arrayz.toFloatArray( norms ) ); return m; } public static Spatial fromLoop( AssetManager am, LoopL<Point2d> gis, double h ) { Node out = new Node(); for ( Loop<Point2d> loop : gis ) { Mesh m = new Mesh(); m.setMode( Mesh.Mode.Lines ); List<Float> coords = new ArrayList<>(); List<Integer> inds = new ArrayList<>(); for ( Loopable<Point2d> ll : loop.loopableIterator() ) { inds.add( inds.size() ); inds.add( inds.size() ); Point3d a = new Point3d( ll.get().x, h, ll.get().y ), b = new Point3d( ll.getNext().get().x, h, ll.getNext().get().y ); coords.add( (float) a.x ); coords.add( (float) a.y ); coords.add( (float) a.z ); coords.add( (float) b.x ); coords.add( (float) b.y ); coords.add( (float) b.z ); { Box box1 = new Box( 0.5f, 0.5f, 0.5f ); Geometry geom = new Geometry( "Box", box1 ); Material mat1 = new Material( am, "Common/MatDefs/Misc/Unshaded.j3md" ); mat1.setColor( "Color", ColorRGBA.Magenta ); geom.setMaterial( mat1 ); geom.setLocalTranslation( Jme3z.toJmeVec( a ) ); out.attachChild( geom ); } } m.setBuffer( VertexBuffer.Type.Position, 3, Arrayz.toFloatArray( coords ) ); m.setBuffer( VertexBuffer.Type.Index, 2, Arrayz.toIntArray( inds ) ); Geometry geom = new Geometry( "profile", m ); Material lineMaterial = new Material( am, "Common/MatDefs/Misc/Unshaded.j3md" ); lineMaterial.getAdditionalRenderState().setLineWidth( 3 ); lineMaterial.setColor( "Color", ColorRGBA.Pink ); geom.setMaterial( lineMaterial ); out.attachChild( geom ); } return out; } public static com.jme3.math.Vector3f toJmeVec( Tuple3d a ) { return new com.jme3.math.Vector3f( (float) a.x, (float) a.y, (float) a.z ); } public static Spatial lines( AssetManager am, List<Line3d> roofLines, ColorRGBA color, float width, boolean cuboid ) { Mesh m; Geometry geom; if ( cuboid ) { MeshBuilder mb = new MeshBuilder(); for ( Line3d l : roofLines ) mb.solidLine( l, width ); m = mb.getMesh(); geom = new Geometry( "3d lines", m ); Material mat = new Material( am, "Common/MatDefs/Light/Lighting.j3md" ); mat.setColor( "Diffuse", color ); mat.setColor( "Ambient", color.mult( 0.5f ) ); mat.setBoolean( "UseMaterialColors", true ); geom.setMaterial( mat ); } else { m = new Mesh(); m.setMode( Mesh.Mode.Lines ); List<Float> coords = new ArrayList<>(); List<Integer> inds = new ArrayList<>(); for ( Line3d line : roofLines ) { inds.add( inds.size() ); inds.add( inds.size() ); coords.add( (float) line.start.x ); coords.add( (float) line.start.y ); coords.add( (float) line.start.z ); coords.add( (float) line.end.x ); coords.add( (float) line.end.y ); coords.add( (float) line.end.z ); } m.setBuffer( VertexBuffer.Type.Position, 3, Arrayz.toFloatArray( coords ) ); m.setBuffer( VertexBuffer.Type.Index, 2, Arrayz.toIntArray( inds ) ); geom = new Geometry( "jmez lines", m ); Material lineMaterial = new Material( am, "Common/MatDefs/Misc/Unshaded.j3md" ); lineMaterial.getAdditionalRenderState().setLineWidth( width ); lineMaterial.setColor( "Color", color == null ? 
ColorRGBA.Pink : color ); geom.setMaterial( lineMaterial ); } geom.updateGeometricState(); geom.updateModelBound(); return geom; } public static void toObj( Mesh m, ObjDump dump, Transform transform ) { // todo: normals float[][] verts = new float[3][3]; Vector3f a = new Vector3f(), b = new Vector3f(), c = new Vector3f(); Matrix4f mat = transform.toTransformMatrix(); VertexBuffer pb = m.getBuffer( Type.Position ); VertexBuffer ub = m.getBuffer( Type.TexCoord ); IndexBuffer ib = m.getIndicesAsList(); FloatBuffer fpb = (FloatBuffer) pb.getData(), ubp = null; float[][] uvs = null; if ( ub != null && ub.getNumComponents() == 2 && ub.getNumElements() == pb.getNumElements() ) { ubp = (FloatBuffer) ub.getData(); uvs = new float[3][2]; } Vector3f v1 = new Vector3f(), v2 = new Vector3f(), v3 = new Vector3f(); Vector2f u1 = new Vector2f(), u2 = new Vector2f(), u3 = new Vector2f(); for ( int t = 0; t < m.getTriangleCount(); t++ ) { try { int vertIndex = t * 3; int vert1 = ib.get( vertIndex ); int vert2 = ib.get( vertIndex + 1 ); int vert3 = ib.get( vertIndex + 2 ); BufferUtils.populateFromBuffer( v1, fpb, vert1 ); BufferUtils.populateFromBuffer( v2, fpb, vert2 ); BufferUtils.populateFromBuffer( v3, fpb, vert3 ); a = mat.mult( v1 ); b = mat.mult( v2 ); c = mat.mult( v3 ); verts[ 0 ][ 0 ] = v1.x; verts[ 0 ][ 1 ] = v1.y; verts[ 0 ][ 2 ] = v1.z; verts[ 1 ][ 0 ] = v2.x; verts[ 1 ][ 1 ] = v2.y; verts[ 1 ][ 2 ] = v2.z; verts[ 2 ][ 0 ] = v3.x; verts[ 2 ][ 1 ] = v3.y; verts[ 2 ][ 2 ] = v3.z; if ( uvs != null ) { BufferUtils.populateFromBuffer( u1, ubp, vert1 ); BufferUtils.populateFromBuffer( u2, ubp, vert2 ); BufferUtils.populateFromBuffer( u3, ubp, vert3 ); uvs[ 0 ][ 0 ] = u1.x; uvs[ 0 ][ 1 ] = u1.y; uvs[ 1 ][ 0 ] = u2.x; uvs[ 1 ][ 1 ] = u2.y; uvs[ 2 ][ 0 ] = u3.x; uvs[ 2 ][ 1 ] = u3.y; } dump.addFace( verts, uvs, null ); } catch ( Throwable th ) { th.printStackTrace(); } } } public static String MAT_KEY = "material"; public static void dump( ObjDump dump, Spatial spat, int i ) { if ( spat instanceof Node ) { // int c = 0; for ( Spatial s : ( (Node) spat ).getChildren() ) { Color color = null; String texture = null; if (s instanceof Geometry) { MatParam mp = ( (Geometry) s ).getMaterial().getParam( "Diffuse" ); if (mp != null) { ColorRGBA gCol = (ColorRGBA) mp.getValue(); if (gCol != null) color = new Color(gCol.getRed(), gCol.getGreen(), gCol.getBlue()); } mp = ( (Geometry) s ).getMaterial().getParam( "DiffuseMap" ); if (mp != null) texture = ((Texture2D) mp.getValue() ).getName(); } if (color != null) { if (texture != null) dump.setCurrentTexture( texture, s.getUserData( MAT_KEY ), color, 0.2 ); else dump.setCurrentMaterial( s.getUserData( MAT_KEY ), color, 0.2 ); } dump( dump, s, i+1 ); } } else if ( spat instanceof Geometry ) { Mesh m = ( (Geometry) spat ).getMesh(); if (m.getMode() == Mode.Lines || m.getMode() == Mode.LineLoop || m.getMode() == Mode.LineStrip ) return; Jme3z.toObj( m, dump, ((Geometry) spat).getLocalTransform() ); } } public static Vector3f to( Tuple3d l ) { return new Vector3f( (float) l.x, (float) l.y, (float) l.z ); } public static Vector3f toJme( javax.vecmath.Vector3f l ) { return new Vector3f( l.x, l.y, l.z); } public static Vector3f toJmeV( double x, double y, double z ) { return new Vector3f( (float)x, (float) y, (float) z ); } public static Vector3d from( com.jme3.math.Vector3f dir ) { return new Vector3d( dir.x, dir.y, dir.z); } public static void removeAllChildren( Node debug ) { for (Spatial s : debug.getChildren() ) s.removeFromParent(); } public static ColorRGBA 
toJme(Color c) { return new ColorRGBA( c.getRed() / 255f, c.getGreen() / 255f, c.getBlue() / 255f, c.getAlpha() / 255f ); } public static Material lit( SimpleApplication sa, double r, double g, double b ) { Material mat = new Material( sa.getAssetManager(), "Common/MatDefs/Light/Lighting.j3md" ); mat.setColor( "Diffuse", new ColorRGBA( (float) r, (float) g, (float) b, 1 ) ); mat.setColor( "Ambient", new ColorRGBA( (float) r, (float) g, (float) b, 1 ) ); mat.setBoolean( "UseMaterialColors", true ); return mat; } public static Point2d to2( Vector3f b ) { return new Point2d( b.x, b.z ); } public static boolean isLine( Mode mode ) { return mode == Mode.LineLoop || mode == Mode.Lines || mode == Mode.LineStrip; } }
use local translation when writing obj
src/org/twak/siteplan/jme/Jme3z.java
use local translation when writing obj
<ide><path>src/org/twak/siteplan/jme/Jme3z.java <ide> b = mat.mult( v2 ); <ide> c = mat.mult( v3 ); <ide> <del> verts[ 0 ][ 0 ] = v1.x; <del> verts[ 0 ][ 1 ] = v1.y; <del> verts[ 0 ][ 2 ] = v1.z; <del> verts[ 1 ][ 0 ] = v2.x; <del> verts[ 1 ][ 1 ] = v2.y; <del> verts[ 1 ][ 2 ] = v2.z; <del> verts[ 2 ][ 0 ] = v3.x; <del> verts[ 2 ][ 1 ] = v3.y; <del> verts[ 2 ][ 2 ] = v3.z; <add> verts[ 0 ][ 0 ] = a.x; <add> verts[ 0 ][ 1 ] = a.y; <add> verts[ 0 ][ 2 ] = a.z; <add> verts[ 1 ][ 0 ] = b.x; <add> verts[ 1 ][ 1 ] = b.y; <add> verts[ 1 ][ 2 ] = b.z; <add> verts[ 2 ][ 0 ] = c.x; <add> verts[ 2 ][ 1 ] = c.y; <add> verts[ 2 ][ 2 ] = c.z; <ide> <ide> if ( uvs != null ) { <ide>
Java
apache-2.0
71778cf6683100885e5344740386068c3914460b
0
google/blockly-android,google/blockly-android,google/blockly-android
/* * Copyright 2016 Google Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.blockly.android.ui.fieldview; import static com.google.common.truth.Truth.assertThat; import com.google.blockly.android.BlocklyTestCase; import com.google.blockly.model.FieldNumber; import org.junit.Before; import org.junit.Test; /** * Tests for {@link BasicFieldNumberView}. */ public class BasicFieldNumberViewTest extends BlocklyTestCase { private static final double INITIAL_POS_INT = 5d; private static final double INITIAL_SIGNED_INT = -5d; private static final double INITIAL_POS_DECIMAL = Math.PI; private static final double INITIAL_SIGNED_DECIMAL = -Math.PI; /** * A regex to match the decimal string equivalent of {@link Math#PI}, accounting for minor * platform variations. (Don't ask me why.) */ private static final String PI_STRING_REGEX = "3\\.141592653589793?"; // Cannot mock final classes. private FieldNumber mFieldPosInt; private FieldNumber mFieldPosDecimal; private FieldNumber mFieldSignedInt; private FieldNumber mFieldSignedDecimal; // Subject of tests. private BasicFieldNumberView view; @Before public void setUp() throws Exception { configureForUIThread(); view = new BasicFieldNumberView(getContext()); mFieldPosInt = new FieldNumber("POSITIVE_INTEGER"); mFieldPosInt.setConstraints(0d, Double.NaN, 1d); // min, max, precision mFieldPosInt.setValue(INITIAL_POS_INT); mFieldPosDecimal = new FieldNumber("POSITIVE_DECIMAL"); mFieldPosDecimal.setConstraints(0d, Double.NaN, Double.NaN); mFieldPosDecimal.setValue(INITIAL_POS_DECIMAL); mFieldSignedInt = new FieldNumber("SIGNED_INTEGER"); mFieldSignedInt.setConstraints(Double.NaN, Double.NaN, 1d); mFieldSignedInt.setValue(INITIAL_SIGNED_INT); mFieldSignedDecimal = new FieldNumber("SIGNED_DECIMAL"); // Default constraints. assertThat(mFieldSignedDecimal.hasMinimum()).isFalse(); assertThat(mFieldSignedDecimal.hasMaximum()).isFalse(); assertThat(mFieldSignedDecimal.hasPrecision()).isFalse(); mFieldSignedDecimal.setValue(INITIAL_SIGNED_DECIMAL); } /** * Verifies {@link BasicFieldNumberView#setField} updates the associated field and the text * value presented to the user. */ @Test public void testSetField() { view.setField(mFieldPosInt); assertThat(view.getField()).isSameAs(mFieldPosInt); assertThat(view.getText().toString()) .isEqualTo("5"); view.setField(mFieldSignedInt); assertThat(view.getField()).isSameAs(mFieldSignedInt); assertThat(view.getText().toString()) .isEqualTo("-5"); // The following is the maximum precision presentation of PI with a double. view.setField(mFieldPosDecimal); assertThat(view.getField()).isSameAs(mFieldPosDecimal); assertThat(view.getText().toString()).matches(PI_STRING_REGEX); view.setField(mFieldSignedDecimal); assertThat(view.getField()).isSameAs(mFieldSignedDecimal); assertThat(view.getText().toString()).matches("-" + PI_STRING_REGEX); } }
blocklytest/src/androidTest/java/com/google/blockly/android/ui/fieldview/BasicFieldNumberViewTest.java
/* * Copyright 2016 Google Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.blockly.android.ui.fieldview; import static com.google.common.truth.Truth.assertThat; import com.google.blockly.android.BlocklyTestCase; import com.google.blockly.model.FieldNumber; import org.junit.Before; import org.junit.Test; /** * Tests for {@link BasicFieldNumberView}. */ public class BasicFieldNumberViewTest extends BlocklyTestCase { private static final double INITIAL_POS_INT = 5d; private static final double INITIAL_SIGNED_INT = -5d; private static final double INITIAL_POS_DECIMAL = Math.PI; private static final double INITIAL_SIGNED_DECIMAL = -Math.PI; /** The decimal string equivalent of {@link Math#PI}. */ private static final String PI_STRING = "3.141592653589793"; // Cannot mock final classes. private FieldNumber mFieldPosInt; private FieldNumber mFieldPosDecimal; private FieldNumber mFieldSignedInt; private FieldNumber mFieldSignedDecimal; // Subject of tests. private BasicFieldNumberView view; @Before public void setUp() throws Exception { configureForUIThread(); view = new BasicFieldNumberView(getContext()); mFieldPosInt = new FieldNumber("POSITIVE_INTEGER"); mFieldPosInt.setConstraints(0d, Double.NaN, 1d); // min, max, precision mFieldPosInt.setValue(INITIAL_POS_INT); mFieldPosDecimal = new FieldNumber("POSITIVE_DECIMAL"); mFieldPosDecimal.setConstraints(0d, Double.NaN, Double.NaN); mFieldPosDecimal.setValue(INITIAL_POS_DECIMAL); mFieldSignedInt = new FieldNumber("SIGNED_INTEGER"); mFieldSignedInt.setConstraints(Double.NaN, Double.NaN, 1d); mFieldSignedInt.setValue(INITIAL_SIGNED_INT); mFieldSignedDecimal = new FieldNumber("SIGNED_DECIMAL"); // Default constraints. assertThat(mFieldSignedDecimal.hasMinimum()).isFalse(); assertThat(mFieldSignedDecimal.hasMaximum()).isFalse(); assertThat(mFieldSignedDecimal.hasPrecision()).isFalse(); mFieldSignedDecimal.setValue(INITIAL_SIGNED_DECIMAL); } /** * Verifies {@link BasicFieldNumberView#setField} updates the associated field and the text * value presented to the user. */ @Test public void testSetField() { view.setField(mFieldPosInt); assertThat(view.getField()).isSameAs(mFieldPosInt); assertThat(view.getText().toString()) .isEqualTo("5"); view.setField(mFieldSignedInt); assertThat(view.getField()).isSameAs(mFieldSignedInt); assertThat(view.getText().toString()) .isEqualTo("-5"); // The following is the maximum precision presentation of PI with a double. view.setField(mFieldPosDecimal); assertThat(view.getField()).isSameAs(mFieldPosDecimal); assertThat(view.getText().toString()) .isEqualTo(PI_STRING); view.setField(mFieldSignedDecimal); assertThat(view.getField()).isSameAs(mFieldSignedDecimal); assertThat(view.getText().toString()) .isEqualTo("-" + PI_STRING); } }
Using a regex instead of a string literal, because whatever Java platform Travis uses seems to render the string without the final character.
blocklytest/src/androidTest/java/com/google/blockly/android/ui/fieldview/BasicFieldNumberViewTest.java
Using a regex instead of a string literal, because whatever Java platform Travis uses seems to render the string without the final character.
<ide><path>blocklytest/src/androidTest/java/com/google/blockly/android/ui/fieldview/BasicFieldNumberViewTest.java <ide> private static final double INITIAL_POS_DECIMAL = Math.PI; <ide> private static final double INITIAL_SIGNED_DECIMAL = -Math.PI; <ide> <del> /** The decimal string equivalent of {@link Math#PI}. */ <del> private static final String PI_STRING = "3.141592653589793"; <add> /** <add> * A regex to match the decimal string equivalent of {@link Math#PI}, accounting for minor <add> * platform variations. (Don't ask me why.) <add> */ <add> private static final String PI_STRING_REGEX = "3\\.141592653589793?"; <ide> <ide> <ide> // Cannot mock final classes. <ide> // The following is the maximum precision presentation of PI with a double. <ide> view.setField(mFieldPosDecimal); <ide> assertThat(view.getField()).isSameAs(mFieldPosDecimal); <del> assertThat(view.getText().toString()) <del> .isEqualTo(PI_STRING); <add> assertThat(view.getText().toString()).matches(PI_STRING_REGEX); <ide> <ide> view.setField(mFieldSignedDecimal); <ide> assertThat(view.getField()).isSameAs(mFieldSignedDecimal); <del> assertThat(view.getText().toString()) <del> .isEqualTo("-" + PI_STRING); <add> assertThat(view.getText().toString()).matches("-" + PI_STRING_REGEX); <ide> } <ide> }
Java
epl-1.0
8306f833fec0d3ef044991ed73913b010fe3a1f1
0
markcullen/kura_Windows,unverbraucht/kura,markcullen/kura_Windows,darionct/kura,cdealti/kura,gavinying/kura,MMaiero/kura,nicolatimeus/kura,markoer/kura,darionct/kura,darionct/kura,gavinying/kura,amitjoy/kura,gavinying/kura,cdealti/kura,ctron/kura,amitjoy/kura,markoer/kura,ctron/kura,ctron/kura,unverbraucht/kura,cdealti/kura,rohitdubey12/kura,cdealti/kura,cdealti/kura,rohitdubey12/kura,rohitdubey12/kura,MMaiero/kura,ymai/kura,unverbraucht/kura,ymai/kura,unverbraucht/kura,nicolatimeus/kura,rohitdubey12/kura,darionct/kura,markoer/kura,amitjoy/kura,MMaiero/kura,ctron/kura,markcullen/kura_Windows,ymai/kura,nicolatimeus/kura,ymai/kura,darionct/kura,nicolatimeus/kura,unverbraucht/kura,gavinying/kura,amitjoy/kura,rohitdubey12/kura,cdealti/kura,ymai/kura,nicolatimeus/kura,markoer/kura,markcullen/kura_Windows,amitjoy/kura,amitjoy/kura,ymai/kura,darionct/kura,nicolatimeus/kura,MMaiero/kura,MMaiero/kura,markcullen/kura_Windows,ctron/kura,gavinying/kura,ctron/kura,markoer/kura,markoer/kura,MMaiero/kura,gavinying/kura
/** * Copyright (c) 2011, 2014 Eurotech and/or its affiliates * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Eurotech */ package org.eclipse.kura.linux.net.modem; import org.eclipse.kura.core.linux.util.LinuxProcessUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class UsbModemDriver extends ModemDriver { private static final Logger s_logger = LoggerFactory.getLogger(UsbModemDriver.class); private String m_name; private String m_vendor; private String m_product; public UsbModemDriver (String name, String vendor, String product) { m_name = name; m_vendor = vendor; m_product = product; } public int install() throws Exception { s_logger.info("installing driver: {}", m_name); return LinuxProcessUtil.start("modprobe " + m_name, true); } public int remove() throws Exception { s_logger.info("removing driver: {}", m_name); return LinuxProcessUtil.start("rmmod " + m_name, true); } public String getName() { return m_name; } public String getVendor() { return m_vendor; } public String getProduct() { return m_product; } }
kura/org.eclipse.kura.linux.net/src/main/java/org/eclipse/kura/linux/net/modem/UsbModemDriver.java
/** * Copyright (c) 2011, 2014 Eurotech and/or its affiliates * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Eurotech */ package org.eclipse.kura.linux.net.modem; import org.eclipse.kura.core.linux.util.LinuxProcessUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class UsbModemDriver { private static final Logger s_logger = LoggerFactory.getLogger(UsbModemDriver.class); private String m_name; public UsbModemDriver (String name) { m_name = name; } public int install() throws Exception { s_logger.info("installing driver: {}", m_name); return LinuxProcessUtil.start("modprobe " + m_name, true); } public int remove() throws Exception { s_logger.info("removing driver: {}", m_name); return LinuxProcessUtil.start("rmmod " + m_name, true); } public String getName() { return m_name; } }
Moved modem reset functionality to new ModemDriver class
kura/org.eclipse.kura.linux.net/src/main/java/org/eclipse/kura/linux/net/modem/UsbModemDriver.java
Moved modem reset functionality to new ModemDriver class
<ide><path>kura/org.eclipse.kura.linux.net/src/main/java/org/eclipse/kura/linux/net/modem/UsbModemDriver.java <ide> import org.slf4j.Logger; <ide> import org.slf4j.LoggerFactory; <ide> <del>public class UsbModemDriver { <add>public class UsbModemDriver extends ModemDriver { <ide> <ide> private static final Logger s_logger = LoggerFactory.getLogger(UsbModemDriver.class); <ide> <ide> private String m_name; <add> private String m_vendor; <add> private String m_product; <ide> <del> public UsbModemDriver (String name) { <add> public UsbModemDriver (String name, String vendor, String product) { <ide> m_name = name; <add> m_vendor = vendor; <add> m_product = product; <ide> } <ide> <ide> public int install() throws Exception { <ide> public String getName() { <ide> return m_name; <ide> } <add> <add> public String getVendor() { <add> return m_vendor; <add> } <add> <add> public String getProduct() { <add> return m_product; <add> } <ide> }
Java
apache-2.0
570f42977a54f71c229d07aea2c3871841e48de1
0
nssales/OG-Platform,McLeodMoores/starling,jerome79/OG-Platform,DevStreet/FinanceAnalytics,nssales/OG-Platform,jerome79/OG-Platform,codeaudit/OG-Platform,codeaudit/OG-Platform,McLeodMoores/starling,DevStreet/FinanceAnalytics,jeorme/OG-Platform,McLeodMoores/starling,jeorme/OG-Platform,jerome79/OG-Platform,ChinaQuants/OG-Platform,ChinaQuants/OG-Platform,jeorme/OG-Platform,jeorme/OG-Platform,nssales/OG-Platform,DevStreet/FinanceAnalytics,nssales/OG-Platform,jerome79/OG-Platform,codeaudit/OG-Platform,McLeodMoores/starling,codeaudit/OG-Platform,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,ChinaQuants/OG-Platform
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.analytics.model.forex.forward; import java.util.Map; import java.util.Set; import com.google.common.collect.ImmutableSet; import com.opengamma.OpenGammaRuntimeException; import com.opengamma.engine.ComputationTarget; import com.opengamma.engine.function.AbstractFunction; import com.opengamma.engine.function.FunctionCompilationContext; import com.opengamma.engine.function.FunctionExecutionContext; import com.opengamma.engine.function.FunctionInputs; import com.opengamma.engine.target.ComputationTargetType; import com.opengamma.engine.value.ComputedValue; import com.opengamma.engine.value.ValueProperties; import com.opengamma.engine.value.ValuePropertyNames; import com.opengamma.engine.value.ValueRequirement; import com.opengamma.engine.value.ValueRequirementNames; import com.opengamma.engine.value.ValueSpecification; import com.opengamma.financial.analytics.CurrencyLabelledMatrix1D; import com.opengamma.financial.analytics.model.CalculationPropertyNamesAndValues; import com.opengamma.financial.analytics.model.forex.ForexVisitors; import com.opengamma.financial.currency.CurrencyMatrixSpotSourcingFunction; import com.opengamma.financial.security.FinancialSecurity; import com.opengamma.financial.security.FinancialSecurityTypes; import com.opengamma.util.async.AsynchronousExecution; import com.opengamma.util.money.Currency; /** * Calculates Present Value on FX Forward instruments. */ public class FXForwardPresentValueFunction extends AbstractFunction.NonCompiledInvoker { @Override public ComputationTargetType getTargetType() { return FinancialSecurityTypes.FX_FORWARD_SECURITY.or(FinancialSecurityTypes.NON_DELIVERABLE_FX_FORWARD_SECURITY); } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) { return ImmutableSet.of(new ValueSpecification(ValueRequirementNames.PRESENT_VALUE, target.toSpecification(), ValueProperties.all())); } @Override public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) { final ValueProperties properties = ValueProperties.builder() .with(ValuePropertyNames.CALCULATION_METHOD, CalculationPropertyNamesAndValues.DISCOUNTING) .get(); final ValueRequirement fxPvRequirement = new ValueRequirement(ValueRequirementNames.FX_PRESENT_VALUE, target.toSpecification(), desiredValue.getConstraints()); final FinancialSecurity security = (FinancialSecurity) target.getSecurity(); final Currency payCurrency = getPayCurrency(security); final Currency receiveCurrency = getReceiveCurrency(security); final ValueRequirement spotRateRequirement = CurrencyMatrixSpotSourcingFunction.getConversionRequirement(payCurrency, receiveCurrency); return ImmutableSet.of(fxPvRequirement, spotRateRequirement); } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target, final Map<ValueSpecification, ValueRequirement> inputs) { ValueProperties properties = null; for (final Map.Entry<ValueSpecification, ValueRequirement> entry : inputs.entrySet()) { if (entry.getKey().getValueName().equals(ValueRequirementNames.FX_PRESENT_VALUE)) { properties = entry.getKey().getProperties(); break; } } if (properties == null) { return null; } final Currency currency = getPayCurrency((FinancialSecurity) target.getSecurity()); 
return ImmutableSet.of(new ValueSpecification(ValueRequirementNames.PRESENT_VALUE, target.toSpecification(), getResultProperties(currency, properties.copy()))); } @Override public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target, final Set<ValueRequirement> desiredValues) throws AsynchronousExecution { final FinancialSecurity security = (FinancialSecurity) target.getSecurity(); final Currency payCurrency = getPayCurrency(security); final Currency receiveCurrency = getReceiveCurrency(security); final ComputedValue input = inputs.getComputedValue(ValueRequirementNames.FX_PRESENT_VALUE); final ValueSpecification inputSpec = input.getSpecification(); final CurrencyLabelledMatrix1D fxPresentValue = (CurrencyLabelledMatrix1D) input.getValue(); if (fxPresentValue.size() != 2) { throw new OpenGammaRuntimeException("Expected " + ValueRequirementNames.FX_PRESENT_VALUE + " input to contain 2 currency values, but found " + fxPresentValue.size()); } int payIndex = -1; int receiveIndex = -1; for (int i = 0; i < 2; i++) { final Currency currency = fxPresentValue.getKeys()[i]; if (payCurrency.equals(currency)) { payIndex = i; } else if (receiveCurrency.equals(currency)) { receiveIndex = i; } else { throw new OpenGammaRuntimeException(ValueRequirementNames.FX_PRESENT_VALUE + " contains unexpected currency " + currency + ". Expected " + payCurrency + " or " + receiveCurrency + "."); } } final double payValue = fxPresentValue.getValues()[payIndex]; final double receiveValue = fxPresentValue.getValues()[receiveIndex]; final double spot = (Double) inputs.getValue(ValueRequirementNames.SPOT_RATE); final double pv = payValue + spot * receiveValue; return ImmutableSet.of(new ComputedValue(getResultSpec(target, inputSpec.getProperties().copy()), pv)); } protected ValueSpecification getResultSpec(final ComputationTarget target, final ValueProperties.Builder fxPresentValueProperties) { final Currency currency = getPayCurrency((FinancialSecurity) target.getSecurity()); return new ValueSpecification(ValueRequirementNames.PRESENT_VALUE, target.toSpecification(), getResultProperties(currency, fxPresentValueProperties)); } protected ValueProperties getResultProperties(final Currency currency, final ValueProperties.Builder fxPresentValueProperties) { return fxPresentValueProperties.withoutAny(ValuePropertyNames.FUNCTION) .with(ValuePropertyNames.FUNCTION, getUniqueId()) .with(ValuePropertyNames.CURRENCY, currency.getCode()) .get(); } protected Currency getPayCurrency(final FinancialSecurity security) { return security.accept(ForexVisitors.getPayCurrencyVisitor()); } protected Currency getReceiveCurrency(final FinancialSecurity security) { return security.accept(ForexVisitors.getReceiveCurrencyVisitor()); } }
projects/OG-Financial/src/main/java/com/opengamma/financial/analytics/model/forex/forward/FXForwardPresentValueFunction.java
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.financial.analytics.model.forex.forward; import java.util.Map; import java.util.Set; import com.google.common.collect.ImmutableSet; import com.opengamma.OpenGammaRuntimeException; import com.opengamma.engine.ComputationTarget; import com.opengamma.engine.function.AbstractFunction; import com.opengamma.engine.function.FunctionCompilationContext; import com.opengamma.engine.function.FunctionExecutionContext; import com.opengamma.engine.function.FunctionInputs; import com.opengamma.engine.target.ComputationTargetType; import com.opengamma.engine.value.ComputedValue; import com.opengamma.engine.value.ValueProperties; import com.opengamma.engine.value.ValuePropertyNames; import com.opengamma.engine.value.ValueRequirement; import com.opengamma.engine.value.ValueRequirementNames; import com.opengamma.engine.value.ValueSpecification; import com.opengamma.financial.analytics.CurrencyLabelledMatrix1D; import com.opengamma.financial.analytics.model.CalculationPropertyNamesAndValues; import com.opengamma.financial.analytics.model.forex.ForexVisitors; import com.opengamma.financial.currency.CurrencyMatrixSpotSourcingFunction; import com.opengamma.financial.security.FinancialSecurity; import com.opengamma.financial.security.FinancialSecurityTypes; import com.opengamma.util.async.AsynchronousExecution; import com.opengamma.util.money.Currency; /** * Calculates Present Value on FX Forward instruments. */ public class FXForwardPresentValueFunction extends AbstractFunction.NonCompiledInvoker { @Override public ComputationTargetType getTargetType() { return FinancialSecurityTypes.FX_FORWARD_SECURITY.or(FinancialSecurityTypes.NON_DELIVERABLE_FX_FORWARD_SECURITY); } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) { return ImmutableSet.of(new ValueSpecification(ValueRequirementNames.PRESENT_VALUE, target.toSpecification(), ValueProperties.all())); } @Override public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) { final ValueProperties properties = ValueProperties.builder() .with(ValuePropertyNames.CALCULATION_METHOD, CalculationPropertyNamesAndValues.DISCOUNTING) .get(); final ValueRequirement fxPvRequirement = new ValueRequirement(ValueRequirementNames.FX_PRESENT_VALUE, target.toSpecification(), properties); final FinancialSecurity security = (FinancialSecurity) target.getSecurity(); final Currency payCurrency = getPayCurrency(security); final Currency receiveCurrency = getReceiveCurrency(security); final ValueRequirement spotRateRequirement = CurrencyMatrixSpotSourcingFunction.getConversionRequirement(payCurrency, receiveCurrency); return ImmutableSet.of(fxPvRequirement, spotRateRequirement); } @Override public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target, final Map<ValueSpecification, ValueRequirement> inputs) { ValueProperties properties = null; for (final Map.Entry<ValueSpecification, ValueRequirement> entry : inputs.entrySet()) { if (entry.getKey().getValueName().equals(ValueRequirementNames.FX_PRESENT_VALUE)) { properties = entry.getKey().getProperties(); break; } } if (properties == null) { return null; } final Currency currency = getPayCurrency((FinancialSecurity) target.getSecurity()); return 
ImmutableSet.of(new ValueSpecification(ValueRequirementNames.PRESENT_VALUE, target.toSpecification(), getResultProperties(currency, properties.copy()))); } @Override public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target, final Set<ValueRequirement> desiredValues) throws AsynchronousExecution { final FinancialSecurity security = (FinancialSecurity) target.getSecurity(); final Currency payCurrency = getPayCurrency(security); final Currency receiveCurrency = getReceiveCurrency(security); final ComputedValue input = inputs.getComputedValue(ValueRequirementNames.FX_PRESENT_VALUE); final ValueSpecification inputSpec = input.getSpecification(); final CurrencyLabelledMatrix1D fxPresentValue = (CurrencyLabelledMatrix1D) input.getValue(); if (fxPresentValue.size() != 2) { throw new OpenGammaRuntimeException("Expected " + ValueRequirementNames.FX_PRESENT_VALUE + " input to contain 2 currency values, but found " + fxPresentValue.size()); } int payIndex = -1; int receiveIndex = -1; for (int i = 0; i < 2; i++) { final Currency currency = fxPresentValue.getKeys()[i]; if (payCurrency.equals(currency)) { payIndex = i; } else if (receiveCurrency.equals(currency)) { receiveIndex = i; } else { throw new OpenGammaRuntimeException(ValueRequirementNames.FX_PRESENT_VALUE + " contains unexpected currency " + currency + ". Expected " + payCurrency + " or " + receiveCurrency + "."); } } final double payValue = fxPresentValue.getValues()[payIndex]; final double receiveValue = fxPresentValue.getValues()[receiveIndex]; final double spot = (Double) inputs.getValue(ValueRequirementNames.SPOT_RATE); final double pv = payValue + spot * receiveValue; return ImmutableSet.of(new ComputedValue(getResultSpec(target, inputSpec.getProperties().copy()), pv)); } protected ValueSpecification getResultSpec(final ComputationTarget target, final ValueProperties.Builder fxPresentValueProperties) { final Currency currency = getPayCurrency((FinancialSecurity) target.getSecurity()); return new ValueSpecification(ValueRequirementNames.PRESENT_VALUE, target.toSpecification(), getResultProperties(currency, fxPresentValueProperties)); } protected ValueProperties getResultProperties(final Currency currency, final ValueProperties.Builder fxPresentValueProperties) { return fxPresentValueProperties.withoutAny(ValuePropertyNames.FUNCTION) .with(ValuePropertyNames.FUNCTION, getUniqueId()) .with(ValuePropertyNames.CURRENCY, currency.getCode()) .get(); } protected Currency getPayCurrency(final FinancialSecurity security) { return security.accept(ForexVisitors.getPayCurrencyVisitor()); } protected Currency getReceiveCurrency(final FinancialSecurity security) { return security.accept(ForexVisitors.getReceiveCurrencyVisitor()); } }
Passing all properties through when converting FX present value to present value
projects/OG-Financial/src/main/java/com/opengamma/financial/analytics/model/forex/forward/FXForwardPresentValueFunction.java
Passing all properties through when converting FX present value to present value
<ide><path>projects/OG-Financial/src/main/java/com/opengamma/financial/analytics/model/forex/forward/FXForwardPresentValueFunction.java
<ide> final ValueProperties properties = ValueProperties.builder()
<ide> .with(ValuePropertyNames.CALCULATION_METHOD, CalculationPropertyNamesAndValues.DISCOUNTING)
<ide> .get();
<del> final ValueRequirement fxPvRequirement = new ValueRequirement(ValueRequirementNames.FX_PRESENT_VALUE, target.toSpecification(), properties);
<add> final ValueRequirement fxPvRequirement = new ValueRequirement(ValueRequirementNames.FX_PRESENT_VALUE, target.toSpecification(), desiredValue.getConstraints());
<ide> final FinancialSecurity security = (FinancialSecurity) target.getSecurity();
<ide> final Currency payCurrency = getPayCurrency(security);
<ide> final Currency receiveCurrency = getReceiveCurrency(security);
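In the OG-Platform commit above, getRequirements() stops building a fresh ValueProperties containing only the DISCOUNTING calculation method and instead forwards desiredValue.getConstraints(), so the constraints the caller placed on the PRESENT_VALUE output also reach the underlying FX_PRESENT_VALUE requirement rather than being dropped. A simplified, self-contained sketch of that pass-through pattern, using hypothetical types rather than the OpenGamma engine classes:

import java.util.Map;

final class ConstraintPassThroughSketch {

    // Hypothetical stand-in for a value requirement: a value name plus its constraints.
    record Requirement(String valueName, Map<String, String> constraints) {}

    // Before (conceptually): only a fixed calculation-method constraint was forwarded upstream.
    static Requirement requireFxPvFixed() {
        return new Requirement("FX_PRESENT_VALUE", Map.of("CalculationMethod", "Discounting"));
    }

    // After (conceptually): the caller's full constraint set is forwarded unchanged.
    static Requirement requireFxPvPassThrough(Requirement desiredValue) {
        return new Requirement("FX_PRESENT_VALUE", desiredValue.constraints());
    }

    public static void main(String[] args) {
        Requirement desired = new Requirement("PRESENT_VALUE",
                Map.of("CalculationMethod", "Discounting", "Currency", "USD"));
        System.out.println(requireFxPvFixed());              // drops the Currency constraint
        System.out.println(requireFxPvPassThrough(desired)); // keeps every requested constraint
    }
}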
JavaScript
mit
4911ebd8a6b921b47109f6b50e79d5ed2b40cc41
0
kumavis/etherex,kustomzone/etherex,kustomzone/etherex,etherex/etherex,kumavis/etherex,kumavis/etherex,etherex/etherex,kustomzone/etherex,etherex/etherex
var utils = require("../js/utils"); var fixtures = require("../js/fixtures"); var bigRat = require('big-rational'); var EthereumClient = function() { this.loadAddresses = function(success, failure) { var addresses = eth.keys.map(function (k) { return eth.secretToAddress(k); }); if (addresses) success(addresses); else failure("Unable to load addresses. Lost your keys?"); }; this.loadMarkets = function(success, failure) { var markets = [{}]; var total = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(2)))); var ptr = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(18)))); var last = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(19)))); console.log("TOTAL MARKETS: " + total); console.log("MARKETS START: " + ptr); console.log("MARKETS LAST: " + last); for (var i = 0; i < total; i++) { var id = eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+7))); console.log("LOADING MARKET ID: " + id); if (id) { markets.push({ id: id, name: eth.toAscii(eth.stateAt(fixtures.addresses.markets, String(ptr))), address: eth.stateAt(fixtures.addresses.markets, String(ptr+3)), amount: eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+1))), precision: eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+2))), price: eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+4))), decimals: eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+5))), }); } ptr = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+9)))); }; if (markets) { success(markets); } else { failure("Unable to load markets. Make a wish!"); } }; this.setUserWatches = function(flux, addresses, markets) { if (ethBrowser) { // ETH balance eth.watch({altered: addresses}).changed(flux.actions.user.updateBalance); // Sub balances var market_addresses = _.rest(_.pluck(markets, 'address')); eth.watch({altered: market_addresses}).changed(flux.actions.user.updateBalanceSub); } else { for (var i = addresses.length - 1; i >= 0; i--) { eth.watch(addresses[i], "", flux.actions.user.updateBalance); flux.actions.user.updateBalanceSub(); for (var m = markets.length - 1; m >= 0; m--) eth.watch(markets[m].address, "", flux.actions.user.updateBalanceSub); } } }; this.setMarketWatches = function(flux, markets) { var market_addresses = _.rest(_.pluck(markets, 'address')); if (ethBrowser) { eth.watch({altered: market_addresses}).changed(flux.actions.trade.loadTrades); } else { flux.actions.trade.loadTrades(); // for (var i = market_addresses.length - 1; i >= 0; i--) { // flux.actions.trade.loadTrades(); // eth.watch(market_addresses[i], "", flux.actions.trade.loadTrades); // } } }; this.updateBalance = function(address, success, failure) { var confirmed = eth.toDecimal(eth.balanceAt(address, -1)); var unconfirmed = eth.toDecimal(eth.balanceAt(address)); var showUnconfirmed = false; if (unconfirmed != confirmed) { showUnconfirmed = true; unconfirmed = this.formatUnconfirmed(confirmed, unconfirmed); } if (confirmed >= 0) { success( utils.formatBalance(confirmed), showUnconfirmed ? "(" + unconfirmed + " unconfirmed)" : null ); } else { failure("Failed to update balance. 
We fell."); } }; this.updateBalanceSub = function(market, address, success, failure) { var confirmed = eth.toDecimal(eth.stateAt(market.address, address, -1)); var unconfirmed = eth.toDecimal(eth.stateAt(market.address, address)); var showUnconfirmed = false; // DEBUG // console.log("confirmed: " + confirmed); // console.log("unconfirmed: " + unconfirmed); // console.log(this.formatUnconfirmed(confirmed, unconfirmed)); if (unconfirmed != confirmed) { showUnconfirmed = true; unconfirmed = this.formatUnconfirmed(confirmed, unconfirmed); } if (confirmed >= 0) { success( utils.formatBalance(confirmed), showUnconfirmed ? "(" + unconfirmed + " unconfirmed)" : null ); } else { failure("Failed to update subcurrency balance. No dice."); } }; this.loadTrades = function(flux, markets, progress, success, failure) { var trades = []; var total = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(2)))); var ptr = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(18)))); var last = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(19)))); var start = ptr; console.log("TOTAL TRADES: " + total); console.log("TRADES START: " + ptr); console.log("TRADES LAST: " + last); for (var i = 0; i < total; i++) { var type = eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr))); if (type) { var mid = eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr+4))); console.log("Loading trade " + i + " for market " + markets[mid].name); trades.push({ id: ptr, type: type == 1 ? 'buy' : 'sell', price: bigRat( eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr+1))) ).divide(fixtures.precision).valueOf(), amount: bigRat( eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr+2))) ).divide(fixtures.ether).valueOf(), owner: eth.stateAt(fixtures.addresses.trades, String(ptr+3)), market: { id: mid, name: markets[mid].name } }); } ptr = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr+9)))); progress({percent: (i + 1) / total * 100 }); }; setTimeout(function() { // temporary slowdown while testing if (trades) { success(trades); } else { failure("Unable to load trades. Playing cards."); } }, 500); }; this.addTrade = function(trade, success, failure) { var amounts = this.getAmounts(trade.amount, trade.price); var data = eth.pad(trade.type, 32) + eth.pad(amounts.amount, 32) + eth.pad(amounts.price, 32) + eth.pad(trade.market, 32); try { if (ethBrowser) eth.transact({ from: eth.key, value: trade.type == 1 ? amounts.total : "0", to: fixtures.addresses.etherex, data: eth.fromAscii(data), gas: "10000", gasPrice: eth.gasPrice }, success); else eth.transact( eth.key, trade.type == 1 ? amounts.total : "0", fixtures.addresses.etherex, data, "10000", eth.gasPrice, success ); } catch(e) { failure(e); } }; this.fillTrade = function(trade, success, failure) { var amounts = this.getAmounts(trade.amount, trade.price); var data = eth.pad(3, 32) + eth.pad(trade.id, 32); try { if (ethBrowser) eth.transact({ from: eth.key, value: trade.type == "sell" ? amounts.total : "0", to: fixtures.addresses.etherex, data: eth.fromAscii(data), gas: "10000", gasPrice: eth.gasPrice }, success); else eth.transact( eth.key, trade.type == "sell" ? 
amounts.total : "0", fixtures.addresses.etherex, data, "10000", eth.gasPrice, success ); } catch(e) { failure(e); } }; this.cancelTrade = function(trade, success, failure) { var data = eth.pad(6, 32) + eth.pad(trade.id, 32); try { if (ethBrowser) eth.transact({ from: eth.key, value: "0", to: fixtures.addresses.etherex, data: eth.fromAscii(data), gas: "10000", gasPrice: eth.gasPrice }, success); else eth.transact( eth.key, "0", fixtures.addresses.etherex, data, "10000", eth.gasPrice, success ); } catch(e) { failure(e); } }; this.getAmounts = function(amount, price) { var bigamount = bigRat(parseFloat(amount)).multiply(bigRat(fixtures.ether)).floor(true).toString(); var bigprice = bigRat(parseFloat(price)).multiply(bigRat(fixtures.precision)).floor(true).toString(); var total = bigRat(parseFloat(amount)) .divide(parseFloat(price)) .multiply(bigRat(fixtures.ether)).floor(true).toString(); // console.log("amount: " + bigamount); // console.log("price: " + bigprice); // console.log("total: " + total); return { amount: bigamount, price: bigprice, total: total } }; this.formatUnconfirmed = function(confirmed, unconfirmed) { unconfirmed = unconfirmed - confirmed; if (unconfirmed < 0) unconfirmed = "- " + utils.formatBalance(-unconfirmed); else unconfirmed = utils.formatBalance(unconfirmed); return unconfirmed; }; }; module.exports = EthereumClient;
etherex/frontend/app/clients/EthereumClient.js
var utils = require("../js/utils"); var fixtures = require("../js/fixtures"); var bigRat = require('big-rational'); var EthereumClient = function() { this.loadAddresses = function(success, failure) { var addresses = eth.keys.map(function (k) { return eth.secretToAddress(k); }); if (addresses) success(addresses); else failure("Unable to load addresses. Lost your keys?"); }; this.loadMarkets = function(success, failure) { var markets = [{}]; var total = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(2)))); var ptr = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(18)))); var last = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(19)))); console.log("TOTAL MARKETS: " + total); console.log("MARKETS START: " + ptr); console.log("MARKETS LAST: " + last); for (var i = 0; i < total; i++) { var id = eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+7))); console.log("LOADING MARKET ID: " + id); if (id) { markets.push({ id: id, name: eth.toAscii(eth.stateAt(fixtures.addresses.markets, String(ptr))), address: eth.stateAt(fixtures.addresses.markets, String(ptr+3)), amount: eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+1))), precision: eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+2))), price: eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+4))), decimals: eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+5))), }); } ptr = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+9)))); }; if (markets) { success(markets); } else { failure("Unable to load markets. Make a wish!"); } }; this.setUserWatches = function(flux, addresses, markets) { if (ethBrowser) { // ETH balance eth.watch({altered: addresses}).changed(flux.actions.user.updateBalance); // Sub balances var market_addresses = _.rest(_.pluck(markets, 'address')); eth.watch({altered: market_addresses}).changed(flux.actions.user.updateBalanceSub); } else { for (var i = addresses.length - 1; i >= 0; i--) { eth.watch(addresses[i], "", flux.actions.user.updateBalance); flux.actions.user.updateBalanceSub(); for (var m = markets.length - 1; m >= 0; m--) eth.watch(markets[m].address, "", flux.actions.user.updateBalanceSub); } } }; this.setMarketWatches = function(flux, markets) { var market_addresses = _.rest(_.pluck(markets, 'address')); if (ethBrowser) { eth.watch({altered: market_addresses}).changed(flux.actions.trade.loadTrades); } else { flux.actions.trade.loadTrades(); // for (var i = market_addresses.length - 1; i >= 0; i--) { // flux.actions.trade.loadTrades(); // eth.watch(market_addresses[i], "", flux.actions.trade.loadTrades); // } } }; this.updateBalance = function(address, success, failure) { var confirmed = eth.toDecimal(eth.balanceAt(address, -1)); var unconfirmed = eth.toDecimal(eth.balanceAt(address)); var showUnconfirmed = false; if (unconfirmed != confirmed) { showUnconfirmed = true; unconfirmed = this.formatUnconfirmed(confirmed, unconfirmed); } if (confirmed >= 0) { success( utils.formatBalance(confirmed), showUnconfirmed ? "(" + unconfirmed + " unconfirmed)" : null ); } else { failure("Failed to update balance. 
We fell."); } }; this.updateBalanceSub = function(market, address, success, failure) { var confirmed = eth.toDecimal(eth.stateAt(market.address, address, -1)); var unconfirmed = eth.toDecimal(eth.stateAt(market.address, address)); var showUnconfirmed = false; // DEBUG // console.log("confirmed: " + confirmed); // console.log("unconfirmed: " + unconfirmed); // console.log(this.formatUnconfirmed(confirmed, unconfirmed)); if (unconfirmed != confirmed) { showUnconfirmed = true; unconfirmed = this.formatUnconfirmed(confirmed, unconfirmed); } if (confirmed >= 0) { success( utils.formatBalance(confirmed), showUnconfirmed ? "(" + unconfirmed + " unconfirmed)" : null ); } else { failure("Failed to update subcurrency balance. No dice."); } }; this.loadTrades = function(flux, markets, progress, success, failure) { var trades = []; var total = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(2)))); var ptr = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(18)))); var last = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(19)))); var start = ptr; console.log("TOTAL TRADES: " + total); console.log("TRADES START: " + ptr); console.log("TRADES LAST: " + last); for (var i = 0; i < total; i++) { var type = eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr))); if (type) { var mid = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr+4)))); console.log("Loading trade " + i + " for market " + markets[mid].name); trades.push({ id: ptr, type: type == 1 ? 'buy' : 'sell', price: bigRat( eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr+1))) ).divide(fixtures.precision).valueOf(), amount: bigRat( eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr+2))) ).divide(fixtures.ether).valueOf(), owner: eth.stateAt(fixtures.addresses.trades, String(ptr+3)), market: { id: mid, name: markets[mid].name } }); } ptr = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+9)))); progress({percent: (i + 1) / total * 100 }); }; setTimeout(function() { // temporary slowdown while testing if (trades) { success(trades); } else { failure("Unable to load trades. Playing cards."); } }, 500); }; this.addTrade = function(trade, success, failure) { var amounts = this.getAmounts(trade.amount, trade.price); var data = eth.pad(trade.type, 32) + eth.pad(amounts.amount, 32) + eth.pad(amounts.price, 32) + eth.pad(trade.market, 32); try { if (ethBrowser) eth.transact({ from: eth.key, value: trade.type == 1 ? amounts.total : "0", to: fixtures.addresses.etherex, data: eth.fromAscii(data), gas: "10000", gasPrice: eth.gasPrice }, success); else eth.transact( eth.key, trade.type == 1 ? amounts.total : "0", fixtures.addresses.etherex, data, "10000", eth.gasPrice, success ); } catch(e) { failure(e); } }; this.fillTrade = function(trade, success, failure) { var amounts = this.getAmounts(trade.amount, trade.price); var data = eth.pad(3, 32) + eth.pad(trade.id, 32); try { if (ethBrowser) eth.transact({ from: eth.key, value: trade.type == "sell" ? amounts.total : "0", to: fixtures.addresses.etherex, data: eth.fromAscii(data), gas: "10000", gasPrice: eth.gasPrice }, success); else eth.transact( eth.key, trade.type == "sell" ? 
amounts.total : "0", fixtures.addresses.etherex, data, "10000", eth.gasPrice, success ); } catch(e) { failure(e); } }; this.cancelTrade = function(trade, success, failure) { var data = eth.pad(6, 32) + eth.pad(trade.id, 32); try { if (ethBrowser) eth.transact({ from: eth.key, value: "0", to: fixtures.addresses.etherex, data: eth.fromAscii(data), gas: "10000", gasPrice: eth.gasPrice }, success); else eth.transact( eth.key, "0", fixtures.addresses.etherex, data, "10000", eth.gasPrice, success ); } catch(e) { failure(e); } }; this.getAmounts = function(amount, price) { var bigamount = bigRat(parseFloat(amount)).multiply(bigRat(fixtures.ether)).floor(true).toString(); var bigprice = bigRat(parseFloat(price)).multiply(bigRat(fixtures.precision)).floor(true).toString(); var total = bigRat(parseFloat(amount)) .divide(parseFloat(price)) .multiply(bigRat(fixtures.ether)).floor(true).toString(); // console.log("amount: " + bigamount); // console.log("price: " + bigprice); // console.log("total: " + total); return { amount: bigamount, price: bigprice, total: total } }; this.formatUnconfirmed = function(confirmed, unconfirmed) { unconfirmed = unconfirmed - confirmed; if (unconfirmed < 0) unconfirmed = "- " + utils.formatBalance(-unconfirmed); else unconfirmed = utils.formatBalance(unconfirmed); return unconfirmed; }; }; module.exports = EthereumClient;
fix trade pointer in UI
etherex/frontend/app/clients/EthereumClient.js
fix trade pointer in UI
<ide><path>etherex/frontend/app/clients/EthereumClient.js
<ide> for (var i = 0; i < total; i++) {
<ide> var type = eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr)));
<ide> if (type) {
<del> var mid = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr+4))));
<add> var mid = eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr+4)));
<ide> console.log("Loading trade " + i + " for market " + markets[mid].name);
<ide> trades.push({
<ide> id: ptr,
<ide> }
<ide> });
<ide> }
<del> ptr = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.markets, String(ptr+9))));
<add> ptr = _.parseInt(eth.toDecimal(eth.stateAt(fixtures.addresses.trades, String(ptr+9))));
<ide>
<ide> progress({percent: (i + 1) / total * 100 });
<ide> };
Java
mit
80bac3a9c0650aec0273f3c803fa918c3d878b1e
0
garnryang/Team8,garnryang/Team8
package Solver; import static org.junit.Assert.*; import org.junit.Test; import CoreDataStructures.Board; import CoreDataStructures.Cell; import CoreDataStructures.Puzzle; import CoreDataStructures.Puzzle.DifficultyLevel; public class HintGeneratorTests { @Test public void canGetHintWhenACellHasOnlyOneAvailableNumber() { Puzzle testPuzzle = getPuzzleWithCellWithOnlyOneAvailableNumber(); Board testBoard = new Board(); testBoard.Initialize(testPuzzle); HintInfo hint = HintGenerator.getHint(testBoard); assertTrue(hint != null); Cell filledCell = hint.GetCell(); assertEquals(1, filledCell.getCoordinates().getRowIndex()); assertEquals(1, filledCell.getCoordinates().getColumnIndex()); assertEquals(4, filledCell.getNumber()); //This can change. Made something up for now. String expectedExplanation = "This cell's value must be 4 because it is the only number that can fit in this cell"; assertEquals(expectedExplanation, hint.GetExplanation()); } @Test public void canGetHintWhenThereIsOnlyOneCellWhereANumberCanFit() { Puzzle testPuzzle = getPuzzleWhereANumberCanOnlyFitInOneCell(); Board testBoard = new Board(); testBoard.Initialize(testPuzzle); HintInfo hint = HintGenerator.getHint(testBoard); assertTrue(hint != null); Cell filledCell = hint.GetCell(); assertEquals(0, filledCell.getCoordinates().getRowIndex()); assertEquals(0, filledCell.getCoordinates().getColumnIndex()); assertEquals(7, filledCell.getNumber()); //This can change. Made something up for now. String expectedExplanation = "This cell's value must be 7 because it is the only cell where this number can fit"; assertEquals(expectedExplanation, hint.GetExplanation()); } /** * (1, 1) is the open cell with only one available number: 4 */ private static Puzzle getPuzzleWithCellWithOnlyOneAvailableNumber() { //Got this one from my iphone Sudoku app... Puzzle puzzle = new Puzzle(); puzzle.setDifficulty(DifficultyLevel.Medium); puzzle.setCellNumber(0, 2, 8); puzzle.setCellNumber(0, 7, 5); puzzle.setCellNumber(1, 0, 6); puzzle.setCellNumber(1, 2, 3); puzzle.setCellNumber(1, 3, 8); puzzle.setCellNumber(1, 4, 5); puzzle.setCellNumber(1, 5, 2); puzzle.setCellNumber(1, 6, 1); puzzle.setCellNumber(1, 7, 9); puzzle.setCellNumber(1, 8, 7); puzzle.setCellNumber(2, 0, 1); puzzle.setCellNumber(2, 3, 7); puzzle.setCellNumber(2, 4, 4); puzzle.setCellNumber(2, 8, 2); puzzle.setCellNumber(3, 8, 3); puzzle.setCellNumber(4, 2, 4); puzzle.setCellNumber(5, 1, 7); puzzle.setCellNumber(5, 6, 4); puzzle.setCellNumber(5, 8, 5); puzzle.setCellNumber(6, 0, 3); puzzle.setCellNumber(6, 3, 9); puzzle.setCellNumber(6, 8, 8); puzzle.setCellNumber(7, 0, 5); puzzle.setCellNumber(7, 4, 3); puzzle.setCellNumber(7, 6, 6); puzzle.setCellNumber(7, 7, 2); puzzle.setCellNumber(7, 8, 1); puzzle.setCellNumber(8, 1, 6); puzzle.setCellNumber(8, 4, 5); puzzle.setCellNumber(8, 5, 8); return puzzle; } /** * (0, 0) is the only place a 7 can fit */ private static Puzzle getPuzzleWhereANumberCanOnlyFitInOneCell() { //Got this one from my iphone Sudoku app... 
//Same puzzle as above with the 4 filled in Puzzle puzzle = new Puzzle(); puzzle.setDifficulty(DifficultyLevel.Medium); puzzle.setCellNumber(0, 2, 8); puzzle.setCellNumber(0, 7, 5); puzzle.setCellNumber(1, 0, 6); puzzle.setCellNumber(1, 1, 4); puzzle.setCellNumber(1, 2, 3); puzzle.setCellNumber(1, 3, 8); puzzle.setCellNumber(1, 4, 5); puzzle.setCellNumber(1, 5, 2); puzzle.setCellNumber(1, 6, 1); puzzle.setCellNumber(1, 7, 9); puzzle.setCellNumber(1, 8, 7); puzzle.setCellNumber(2, 0, 1); puzzle.setCellNumber(2, 3, 7); puzzle.setCellNumber(2, 4, 4); puzzle.setCellNumber(2, 8, 2); puzzle.setCellNumber(3, 8, 3); puzzle.setCellNumber(4, 2, 4); puzzle.setCellNumber(5, 1, 7); puzzle.setCellNumber(5, 6, 4); puzzle.setCellNumber(5, 8, 5); puzzle.setCellNumber(6, 0, 3); puzzle.setCellNumber(6, 3, 9); puzzle.setCellNumber(6, 8, 8); puzzle.setCellNumber(7, 0, 5); puzzle.setCellNumber(7, 4, 3); puzzle.setCellNumber(7, 6, 6); puzzle.setCellNumber(7, 7, 2); puzzle.setCellNumber(7, 8, 1); puzzle.setCellNumber(8, 1, 6); puzzle.setCellNumber(8, 4, 5); puzzle.setCellNumber(8, 5, 8); return puzzle; } }
test/Solver/HintGeneratorTests.java
package Solver; import static org.junit.Assert.*; import org.junit.Test; import CoreDataStructures.Board; import CoreDataStructures.Cell; import CoreDataStructures.Puzzle; import CoreDataStructures.Puzzle.DifficultyLevel; public class HintGeneratorTests { @Test public void canGetHintWhenACellHasOnlyOneAvailableNumber() { Puzzle testPuzzle = getPuzzleWithCellWithOnlyOneAvailableNumber(); Board testBoard = new Board(); testBoard.Initialize(testPuzzle); HintInfo hint = HintGenerator.getHint(testBoard); assertTrue(hint != null); Cell filledCell = hint.GetCell(); assertEquals(1, filledCell.getCoordinates().getRowIndex()); assertEquals(1, filledCell.getCoordinates().getColumnIndex()); assertEquals(4, filledCell.getNumber()); //This can change. Made something up for now. String expectedExplanation = "This cell's value must be 4 because it is the only number that can fit in this cell"; assertEquals(expectedExplanation, hint.GetExplanation()); } @Test public void canGetHintWhenThereIsOnlyOneCellWhereANumberCanFit() { Puzzle testPuzzle = getPuzzleWhereANumberCanOnlyFitInOneCell(); Board testBoard = new Board(); testBoard.Initialize(testPuzzle); HintInfo hint = HintGenerator.getHint(testBoard); assertTrue(hint != null); Cell filledCell = hint.GetCell(); assertEquals(0, filledCell.getCoordinates().getRowIndex()); assertEquals(0, filledCell.getCoordinates().getColumnIndex()); assertEquals(7, filledCell.getNumber()); //This can change. Made something up for now. String expectedExplanation = "This cell's value must be 7 because it is the only cell where this number can fit"; assertEquals(expectedExplanation, hint.GetExplanation()); } /** * (1, 1) is the open cell with only one available number: 4 */ public static Puzzle getPuzzleWithCellWithOnlyOneAvailableNumber() { //Got this one from my iphone Sudoku app... Puzzle puzzle = new Puzzle(); puzzle.setDifficulty(DifficultyLevel.Medium); puzzle.setCellNumber(0, 2, 8); puzzle.setCellNumber(0, 7, 5); puzzle.setCellNumber(1, 0, 6); puzzle.setCellNumber(1, 2, 3); puzzle.setCellNumber(1, 3, 8); puzzle.setCellNumber(1, 4, 5); puzzle.setCellNumber(1, 5, 2); puzzle.setCellNumber(1, 6, 1); puzzle.setCellNumber(1, 7, 9); puzzle.setCellNumber(1, 8, 7); puzzle.setCellNumber(2, 0, 1); puzzle.setCellNumber(2, 3, 7); puzzle.setCellNumber(2, 4, 4); puzzle.setCellNumber(2, 8, 2); puzzle.setCellNumber(3, 8, 3); puzzle.setCellNumber(4, 2, 4); puzzle.setCellNumber(5, 1, 7); puzzle.setCellNumber(5, 6, 4); puzzle.setCellNumber(5, 8, 5); puzzle.setCellNumber(6, 0, 3); puzzle.setCellNumber(6, 3, 9); puzzle.setCellNumber(6, 8, 8); puzzle.setCellNumber(7, 0, 5); puzzle.setCellNumber(7, 4, 3); puzzle.setCellNumber(7, 6, 6); puzzle.setCellNumber(7, 7, 2); puzzle.setCellNumber(7, 8, 1); puzzle.setCellNumber(8, 1, 6); puzzle.setCellNumber(8, 4, 5); puzzle.setCellNumber(8, 5, 8); return puzzle; } /** * (0, 0) is the only place a 7 can fit */ public static Puzzle getPuzzleWhereANumberCanOnlyFitInOneCell() { //Got this one from my iphone Sudoku app... 
//Same puzzle as above with the 4 filled in Puzzle puzzle = new Puzzle(); puzzle.setDifficulty(DifficultyLevel.Medium); puzzle.setCellNumber(0, 2, 8); puzzle.setCellNumber(0, 7, 5); puzzle.setCellNumber(1, 0, 6); puzzle.setCellNumber(1, 1, 4); puzzle.setCellNumber(1, 2, 3); puzzle.setCellNumber(1, 3, 8); puzzle.setCellNumber(1, 4, 5); puzzle.setCellNumber(1, 5, 2); puzzle.setCellNumber(1, 6, 1); puzzle.setCellNumber(1, 7, 9); puzzle.setCellNumber(1, 8, 7); puzzle.setCellNumber(2, 0, 1); puzzle.setCellNumber(2, 3, 7); puzzle.setCellNumber(2, 4, 4); puzzle.setCellNumber(2, 8, 2); puzzle.setCellNumber(3, 8, 3); puzzle.setCellNumber(4, 2, 4); puzzle.setCellNumber(5, 1, 7); puzzle.setCellNumber(5, 6, 4); puzzle.setCellNumber(5, 8, 5); puzzle.setCellNumber(6, 0, 3); puzzle.setCellNumber(6, 3, 9); puzzle.setCellNumber(6, 8, 8); puzzle.setCellNumber(7, 0, 5); puzzle.setCellNumber(7, 4, 3); puzzle.setCellNumber(7, 6, 6); puzzle.setCellNumber(7, 7, 2); puzzle.setCellNumber(7, 8, 1); puzzle.setCellNumber(8, 1, 6); puzzle.setCellNumber(8, 4, 5); puzzle.setCellNumber(8, 5, 8); return puzzle; } }
Make methods private
test/Solver/HintGeneratorTests.java
Make methods private
<ide><path>test/Solver/HintGeneratorTests.java
<ide> /**
<ide> * (1, 1) is the open cell with only one available number: 4
<ide> */
<del> public static Puzzle getPuzzleWithCellWithOnlyOneAvailableNumber() {
<add> private static Puzzle getPuzzleWithCellWithOnlyOneAvailableNumber() {
<ide> //Got this one from my iphone Sudoku app...
<ide> Puzzle puzzle = new Puzzle();
<ide> puzzle.setDifficulty(DifficultyLevel.Medium);
<ide> /**
<ide> * (0, 0) is the only place a 7 can fit
<ide> */
<del> public static Puzzle getPuzzleWhereANumberCanOnlyFitInOneCell() {
<add> private static Puzzle getPuzzleWhereANumberCanOnlyFitInOneCell() {
<ide> //Got this one from my iphone Sudoku app...
<ide> //Same puzzle as above with the 4 filled in
<ide> Puzzle puzzle = new Puzzle();
Java
apache-2.0
a4af51dffae252a4aa274856fb97815973f57afa
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.build; import com.intellij.build.events.*; import com.intellij.execution.filters.Filter; import com.intellij.execution.filters.HyperlinkInfo; import com.intellij.execution.filters.TextConsoleBuilderFactory; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.ui.ConsoleView; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.icons.AllIcons; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.ui.ThreeComponentsSplitter; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.pom.Navigatable; import com.intellij.ui.*; import com.intellij.ui.speedSearch.SpeedSearchUtil; import com.intellij.ui.tree.AsyncTreeModel; import com.intellij.ui.tree.StructureTreeModel; import com.intellij.ui.tree.treeTable.TreeTableModelWithColumns; import com.intellij.ui.treeStructure.SimpleNode; import com.intellij.ui.treeStructure.SimpleTreeStructure; import com.intellij.ui.treeStructure.treetable.TreeColumnInfo; import com.intellij.ui.treeStructure.treetable.TreeTable; import com.intellij.ui.treeStructure.treetable.TreeTableModel; import com.intellij.ui.treeStructure.treetable.TreeTableTree; import com.intellij.util.EditSourceOnDoubleClickHandler; import com.intellij.util.EditSourceOnEnterKeyHandler; import com.intellij.util.ObjectUtils; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.text.DateFormatUtil; import com.intellij.util.ui.ColumnInfo; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.tree.TreeUtil; import com.intellij.util.ui.update.MergingUpdateQueue; import com.intellij.util.ui.update.Update; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.CompoundBorder; import javax.swing.table.DefaultTableCellRenderer; import javax.swing.table.TableCellRenderer; import javax.swing.table.TableColumn; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.TreeCellRenderer; import javax.swing.tree.TreePath; import java.awt.*; import java.awt.event.FocusEvent; import java.awt.event.FocusListener; import java.io.File; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; /** * @author Vladislav.Soroka */ public class BuildTreeConsoleView implements ConsoleView, DataProvider, BuildConsoleView { private static final Logger LOG = Logger.getInstance(BuildTreeConsoleView.class); @NonNls private static final String TREE = "tree"; private final JPanel myPanel = new JPanel(); private final Map<Object, ExecutionNode> nodesMap = ContainerUtil.newConcurrentMap(); private final Project myProject; private final DetailsHandler myDetailsHandler; private final TableColumn myTimeColumn; private final String myWorkingDir; private volatile int myTimeColumnWidth; private final AtomicBoolean myDisposed = new 
AtomicBoolean(); private final MergingUpdateQueue myLaterInvocator = new MergingUpdateQueue("BuildTreeConsoleView later invocator", 100, true, null, this); private final StructureTreeModel<SimpleTreeStructure> myTreeModel; private final TreeTableTree myTree; private final ExecutionNode myRootNode; public BuildTreeConsoleView(Project project, BuildDescriptor buildDescriptor) { myProject = project; myWorkingDir = FileUtil.toSystemIndependentName(buildDescriptor.getWorkingDir()); final ColumnInfo[] COLUMNS = { new TreeColumnInfo("name"), new ColumnInfo("time elapsed") { @Nullable @Override public Object valueOf(Object o) { if (o instanceof DefaultMutableTreeNode) { final Object userObject = ((DefaultMutableTreeNode)o).getUserObject(); if (userObject instanceof ExecutionNode) { String duration = ((ExecutionNode)userObject).getDuration(); updateTimeColumnWidth("___" + duration, false); return duration; } } return null; } } }; myRootNode = new ExecutionNode(myProject, null); myRootNode.setAutoExpandNode(true); SimpleTreeStructure treeStructure = new SimpleTreeStructure.Impl(myRootNode); myTreeModel = new StructureTreeModel<>(treeStructure); final TreeTableModel model = new TreeTableModelWithColumns(new AsyncTreeModel(myTreeModel, this), COLUMNS); DefaultTableCellRenderer timeColumnCellRenderer = new DefaultTableCellRenderer() { @Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column); setHorizontalAlignment(SwingConstants.RIGHT); Color fg = isSelected ? UIUtil.getTreeSelectionForeground(hasFocus) : SimpleTextAttributes.GRAY_ATTRIBUTES.getFgColor(); setForeground(fg); return this; } }; TreeTable treeTable = new TreeTable(model) { @Override public TableCellRenderer getCellRenderer(int row, int column) { if (column == 1) { return timeColumnCellRenderer; } return super.getCellRenderer(row, column); } }; EditSourceOnDoubleClickHandler.install(treeTable); EditSourceOnEnterKeyHandler.install(treeTable, null); myTree = treeTable.getTree(); treeTable.addFocusListener(new FocusListener() { @Override public void focusGained(FocusEvent e) { treeTable.setSelectionBackground(UIUtil.getTreeSelectionBackground(true)); } @Override public void focusLost(FocusEvent e) { treeTable.setSelectionBackground(UIUtil.getTreeSelectionBackground(false)); } }); final TreeCellRenderer treeCellRenderer = myTree.getCellRenderer(); myTree.setCellRenderer(new TreeCellRenderer() { @Override public Component getTreeCellRendererComponent(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) { final Component rendererComponent = treeCellRenderer.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus); if (rendererComponent instanceof SimpleColoredComponent) { Color bg = UIUtil.getTreeBackground(selected, true); Color fg = UIUtil.getTreeForeground(selected, true); if (selected) { for (SimpleColoredComponent.ColoredIterator it = ((SimpleColoredComponent)rendererComponent).iterator(); it.hasNext(); ) { it.next(); int offset = it.getOffset(); int endOffset = it.getEndOffset(); SimpleTextAttributes currentAttributes = it.getTextAttributes(); SimpleTextAttributes newAttributes = new SimpleTextAttributes(bg, fg, currentAttributes.getWaveColor(), currentAttributes.getStyle()); it.split(endOffset - offset, newAttributes); } } SpeedSearchUtil.applySpeedSearchHighlighting(treeTable, 
(SimpleColoredComponent)rendererComponent, true, selected); } return rendererComponent; } }); new TreeTableSpeedSearch(treeTable).setComparator(new SpeedSearchComparator(false)); treeTable.setTableHeader(null); myTimeColumn = treeTable.getColumnModel().getColumn(1); myTimeColumn.setResizable(false); updateTimeColumnWidth("Running for " + StringUtil.formatDuration(11111L), true); TreeUtil.installActions(myTree); JPanel myContentPanel = new JPanel(); myContentPanel.setLayout(new CardLayout()); myContentPanel.add(ScrollPaneFactory.createScrollPane(treeTable, SideBorder.LEFT), TREE); myPanel.setLayout(new BorderLayout()); ThreeComponentsSplitter myThreeComponentsSplitter = new ThreeComponentsSplitter() { @Override public void doLayout() { super.doLayout(); JComponent detailsComponent = myDetailsHandler.getComponent(); if (detailsComponent != null && detailsComponent.isVisible()) { int firstSize = getFirstSize(); int lastSize = getLastSize(); if (firstSize == 0 && lastSize == 0) { int width = Math.round(getWidth() / 2f); if (width > 0) { setFirstSize(width); } } } } }; Disposer.register(this, myThreeComponentsSplitter); myThreeComponentsSplitter.setFirstComponent(myContentPanel); myDetailsHandler = new DetailsHandler(myProject, myTree, myThreeComponentsSplitter); myThreeComponentsSplitter.setLastComponent(myDetailsHandler.getComponent()); myPanel.add(myThreeComponentsSplitter, BorderLayout.CENTER); } private ExecutionNode getRootElement() { return myRootNode; } @Override public void print(@NotNull String text, @NotNull ConsoleViewContentType contentType) { } @Override public void clear() { getRootElement().removeChildren(); nodesMap.clear(); myDetailsHandler.clear(); myTreeModel.invalidate(); } @Override public void scrollTo(int offset) { } @Override public void attachToProcess(ProcessHandler processHandler) { } @Override public void setOutputPaused(boolean value) { } @Override public boolean isOutputPaused() { return false; } @Override public boolean hasDeferredOutput() { return false; } @Override public void performWhenNoDeferredOutput(@NotNull Runnable runnable) { } @Override public void setHelpId(@NotNull String helpId) { } @Override public void addMessageFilter(@NotNull Filter filter) { } @Override public void printHyperlink(@NotNull String hyperlinkText, @Nullable HyperlinkInfo info) { } @Override public int getContentSize() { return 0; } @Override public boolean canPause() { return false; } @NotNull @Override public AnAction[] createConsoleActions() { return AnAction.EMPTY_ARRAY; } @Override public void allowHeavyFilters() { } @Override public JComponent getComponent() { return myPanel; } @Override public JComponent getPreferredFocusableComponent() { return myTree; } @Override public void dispose() { myDisposed.set(true); } public boolean isDisposed() { return myDisposed.get(); } @Override public void onEvent(@NotNull BuildEvent event) { ExecutionNode parentNode = event.getParentId() == null ? 
null : nodesMap.get(event.getParentId()); ExecutionNode currentNode = nodesMap.get(event.getId()); if (event instanceof StartEvent || event instanceof MessageEvent) { ExecutionNode rootElement = getRootElement(); if (currentNode == null) { if (event instanceof StartBuildEvent) { currentNode = rootElement; } else { if (event instanceof MessageEvent) { MessageEvent messageEvent = (MessageEvent)event; parentNode = createMessageParentNodes(messageEvent, parentNode); } currentNode = new ExecutionNode(myProject, parentNode); } currentNode.setAutoExpandNode(currentNode == rootElement || parentNode == rootElement); nodesMap.put(event.getId(), currentNode); } else { LOG.warn("start event id collision found"); return; } if (parentNode != null) { parentNode.add(currentNode); } if (event instanceof StartBuildEvent) { String buildTitle = ((StartBuildEvent)event).getBuildTitle(); currentNode.setTitle(buildTitle); currentNode.setAutoExpandNode(true); scheduleUpdate(currentNode); } else if (event instanceof MessageEvent) { MessageEvent messageEvent = (MessageEvent)event; currentNode.setStartTime(messageEvent.getEventTime()); currentNode.setEndTime(messageEvent.getEventTime()); currentNode.setNavigatable(messageEvent.getNavigatable(myProject)); final MessageEventResult messageEventResult = messageEvent.getResult(); currentNode.setResult(messageEventResult); } } else { currentNode = nodesMap.get(event.getId()); if (currentNode == null && event instanceof ProgressBuildEvent) { currentNode = new ExecutionNode(myProject, parentNode); nodesMap.put(event.getId(), currentNode); if (parentNode != null) { parentNode.add(currentNode); } } } if (currentNode == null) { // TODO log error return; } currentNode.setName(event.getMessage()); currentNode.setHint(event.getHint()); if (currentNode.getStartTime() == 0) { currentNode.setStartTime(event.getEventTime()); } if (event instanceof FinishEvent) { currentNode.setEndTime(event.getEventTime()); currentNode.setResult(((FinishEvent)event).getResult()); int timeColumnWidth = new JLabel("__" + currentNode.getDuration(), SwingConstants.RIGHT).getPreferredSize().width; if (myTimeColumnWidth < timeColumnWidth) { myTimeColumnWidth = timeColumnWidth; } } else { scheduleUpdate(currentNode); } if (event instanceof FinishBuildEvent) { String aHint = event.getHint(); String time = DateFormatUtil.formatDateTime(event.getEventTime()); aHint = aHint == null ? "at " + time : aHint + " at " + time; currentNode.setHint(aHint); updateTimeColumnWidth(myTimeColumnWidth); if (myDetailsHandler.myExecutionNode == null) { myDetailsHandler.setNode(getRootElement()); } if (((FinishBuildEvent)event).getResult() instanceof FailureResult) { JTree tree = myTree; if (tree != null && !tree.isRootVisible()) { ExecutionNode rootElement = getRootElement(); ExecutionNode resultNode = new ExecutionNode(myProject, rootElement); resultNode.setName(StringUtil.toTitleCase(rootElement.getName())); resultNode.setHint(rootElement.getHint()); resultNode.setEndTime(rootElement.getEndTime()); resultNode.setStartTime(rootElement.getStartTime()); resultNode.setResult(rootElement.getResult()); resultNode.setTooltip(rootElement.getTooltip()); rootElement.add(resultNode); scheduleUpdate(resultNode); } } myTreeModel.invalidate(); } } void scheduleUpdate(ExecutionNode executionNode) { SimpleNode node = executionNode.getParent() == null ? 
executionNode : executionNode.getParent(); final Update update = new Update(node) { @Override public void run() { myTreeModel.invalidate(node, true) .onSuccess(p -> TreeUtil.expand(myTree, 2)); } }; myLaterInvocator.queue(update); } private ExecutionNode createMessageParentNodes(MessageEvent messageEvent, ExecutionNode parentNode) { Object messageEventParentId = messageEvent.getParentId(); if (messageEventParentId == null) return null; String group = messageEvent.getGroup(); String groupNodeId = group.hashCode() + messageEventParentId.toString(); ExecutionNode messagesGroupNode = getOrCreateMessagesNode(messageEvent, groupNodeId, parentNode, null, group, true, null, null, nodesMap, myProject); EventResult groupNodeResult = messagesGroupNode.getResult(); final MessageEvent.Kind eventKind = messageEvent.getKind(); if (!(groupNodeResult instanceof MessageEventResult) || ((MessageEventResult)groupNodeResult).getKind().compareTo(eventKind) > 0) { messagesGroupNode.setResult(new MessageEventResult() { @Override public MessageEvent.Kind getKind() { return eventKind; } }); } if (messageEvent instanceof FileMessageEvent) { ExecutionNode fileParentNode = messagesGroupNode; FilePosition filePosition = ((FileMessageEvent)messageEvent).getFilePosition(); String filePath = FileUtil.toSystemIndependentName(filePosition.getFile().getPath()); String parentsPath = ""; String relativePath = FileUtil.getRelativePath(myWorkingDir, filePath, '/'); if (relativePath != null) { String nodeId = groupNodeId + myWorkingDir; ExecutionNode workingDirNode = getOrCreateMessagesNode(messageEvent, nodeId, messagesGroupNode, myWorkingDir, null, false, () -> AllIcons.Nodes.Module, null, nodesMap, myProject); parentsPath = myWorkingDir; fileParentNode = workingDirNode; } VirtualFile sourceRootForFile; VirtualFile ioFile = VfsUtil.findFileByIoFile(new File(filePath), false); if (ioFile != null && (sourceRootForFile = ProjectFileIndex.SERVICE.getInstance(myProject).getSourceRootForFile(ioFile)) != null) { relativePath = FileUtil.getRelativePath(parentsPath, sourceRootForFile.getPath(), '/'); if (relativePath != null) { parentsPath += ("/" + relativePath); String contentRootNodeId = groupNodeId + sourceRootForFile.getPath(); fileParentNode = getOrCreateMessagesNode(messageEvent, contentRootNodeId, fileParentNode, relativePath, null, false, () -> ProjectFileIndex.SERVICE.getInstance(myProject).isInTestSourceContent(ioFile) ? AllIcons.Modules.TestRoot : AllIcons.Modules.SourceRoot, null, nodesMap, myProject); } } String fileNodeId = groupNodeId + filePath; relativePath = StringUtil.isEmpty(parentsPath) ? filePath : FileUtil.getRelativePath(parentsPath, filePath, '/'); parentNode = getOrCreateMessagesNode(messageEvent, fileNodeId, fileParentNode, relativePath, null, false, () -> { VirtualFile file = VfsUtil.findFileByIoFile(filePosition.getFile(), false); if (file != null) { return file.getFileType().getIcon(); } return null; }, messageEvent.getNavigatable(myProject), nodesMap, myProject); } else { parentNode = messagesGroupNode; } if (eventKind == MessageEvent.Kind.ERROR || eventKind == MessageEvent.Kind.WARNING) { SimpleNode p = parentNode; do { ((ExecutionNode)p).reportChildMessageKind(eventKind); } while ((p = p.getParent()) instanceof ExecutionNode); } return parentNode; } @NotNull private static ExecutionNode getOrCreateMessagesNode(MessageEvent messageEvent, String nodeId, ExecutionNode parentNode, String nodeName, String nodeTitle, boolean autoExpandNode, @Nullable Supplier<? 
extends Icon> iconProvider, @Nullable Navigatable navigatable, Map<Object, ExecutionNode> nodesMap, Project project) { ExecutionNode node = nodesMap.get(nodeId); if (node == null) { node = new ExecutionNode(project, parentNode); node.setName(nodeName); node.setTitle(nodeTitle); if (autoExpandNode) { node.setAutoExpandNode(true); } node.setStartTime(messageEvent.getEventTime()); node.setEndTime(messageEvent.getEventTime()); if (iconProvider != null) { node.setIconProvider(iconProvider); } if (navigatable != null) { node.setNavigatable(navigatable); } parentNode.add(node); nodesMap.put(nodeId, node); } return node; } public void hideRootNode() { UIUtil.invokeLaterIfNeeded(() -> { if (myTree != null) { myTree.setRootVisible(false); myTree.setShowsRootHandles(true); } }); } private void updateTimeColumnWidth(String text, boolean force) { int timeColumnWidth = new JLabel(text, SwingConstants.RIGHT).getPreferredSize().width; if (myTimeColumnWidth > timeColumnWidth) { timeColumnWidth = myTimeColumnWidth; } if (force || myTimeColumn.getMaxWidth() < timeColumnWidth || myTimeColumn.getWidth() < timeColumnWidth) { updateTimeColumnWidth(timeColumnWidth); } } private void updateTimeColumnWidth(int width) { myTimeColumn.setPreferredWidth(width); myTimeColumn.setMinWidth(width); myTimeColumn.setMaxWidth(width); } @Nullable @Override public Object getData(@NotNull String dataId) { if (PlatformDataKeys.HELP_ID.is(dataId)) return "reference.build.tool.window"; if (CommonDataKeys.PROJECT.is(dataId)) return myProject; if (CommonDataKeys.NAVIGATABLE_ARRAY.is(dataId)) return extractNavigatables(); return null; } private Object extractNavigatables() { final List<Navigatable> navigatables = new ArrayList<>(); for (ExecutionNode each : getSelectedNodes()) { List<Navigatable> navigatable = each.getNavigatables(); navigatables.addAll(navigatable); } return navigatables.isEmpty() ? 
null : navigatables.toArray(new Navigatable[0]); } private ExecutionNode[] getSelectedNodes() { final ExecutionNode[] result = new ExecutionNode[0]; if (myTree != null) { final List<ExecutionNode> nodes = TreeUtil.collectSelectedObjects(myTree, path -> TreeUtil.getLastUserObject(ExecutionNode.class, path)); return nodes.toArray(result); } return result; } private static class DetailsHandler { private final ThreeComponentsSplitter mySplitter; @Nullable private ExecutionNode myExecutionNode; private final ConsoleView myConsole; private final JPanel myPanel; DetailsHandler(Project project, TreeTableTree tree, ThreeComponentsSplitter threeComponentsSplitter) { myConsole = TextConsoleBuilderFactory.getInstance().createBuilder(project).getConsole(); mySplitter = threeComponentsSplitter; myPanel = new JPanel(new BorderLayout()); JComponent consoleComponent = myConsole.getComponent(); AnAction[] consoleActions = myConsole.createConsoleActions(); consoleComponent.setFocusable(true); final Color editorBackground = EditorColorsManager.getInstance().getGlobalScheme().getDefaultBackground(); consoleComponent.setBorder(new CompoundBorder(IdeBorderFactory.createBorder(SideBorder.RIGHT), new SideBorder(editorBackground, SideBorder.LEFT))); myPanel.add(consoleComponent, BorderLayout.CENTER); final ActionToolbar toolbar = ActionManager.getInstance() .createActionToolbar("BuildResults", new DefaultActionGroup(consoleActions), false); myPanel.add(toolbar.getComponent(), BorderLayout.EAST); myPanel.setVisible(false); tree.addTreeSelectionListener(e -> { TreePath path = e.getPath(); if (path == null || !e.isAddedPath()) { return; } TreePath selectionPath = tree.getSelectionPath(); setNode(selectionPath != null ? (DefaultMutableTreeNode)selectionPath.getLastPathComponent() : null); }); Disposer.register(threeComponentsSplitter, myConsole); } public boolean setNode(@NotNull ExecutionNode node) { EventResult eventResult = node.getResult(); boolean hasChanged = false; if (eventResult instanceof FailureResult) { myConsole.clear(); List<? extends Failure> failures = ((FailureResult)eventResult).getFailures(); if (failures.isEmpty()) return false; for (Iterator<? 
extends Failure> iterator = failures.iterator(); iterator.hasNext(); ) { Failure failure = iterator.next(); String text = ObjectUtils.chooseNotNull(failure.getDescription(), failure.getMessage()); if (text == null && failure.getError() != null) { text = failure.getError().getMessage(); } if (text == null) continue; printDetails(failure, text); hasChanged = true; if (iterator.hasNext()) { myConsole.print("\n\n", ConsoleViewContentType.NORMAL_OUTPUT); } } } else if (eventResult instanceof MessageEventResult) { String details = ((MessageEventResult)eventResult).getDetails(); if (details == null) { return false; } if (details.isEmpty()) { return false; } myConsole.clear(); printDetails(null, details); hasChanged = true; } if (!hasChanged) return false; myConsole.scrollTo(0); int firstSize = mySplitter.getFirstSize(); int lastSize = mySplitter.getLastSize(); if (firstSize == 0 && lastSize == 0) { int width = Math.round(mySplitter.getWidth() / 2f); mySplitter.setFirstSize(width); } myPanel.setVisible(true); return true; } private boolean printDetails(Failure failure, @Nullable String details) { return BuildConsoleUtils.printDetails(myConsole, failure, details); } public void setNode(@Nullable DefaultMutableTreeNode node) { if (node == null || node.getUserObject() == myExecutionNode) return; if (node.getUserObject() instanceof ExecutionNode) { myExecutionNode = (ExecutionNode)node.getUserObject(); if (setNode((ExecutionNode)node.getUserObject())) { return; } } myExecutionNode = null; myPanel.setVisible(false); } public JComponent getComponent() { return myPanel; } public void clear() { myPanel.setVisible(false); myConsole.clear(); } } }
platform/lang-impl/src/com/intellij/build/BuildTreeConsoleView.java
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.build; import com.intellij.build.events.*; import com.intellij.execution.filters.Filter; import com.intellij.execution.filters.HyperlinkInfo; import com.intellij.execution.filters.TextConsoleBuilderFactory; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.ui.ConsoleView; import com.intellij.execution.ui.ConsoleViewContentType; import com.intellij.icons.AllIcons; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.ui.ThreeComponentsSplitter; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.pom.Navigatable; import com.intellij.ui.*; import com.intellij.ui.speedSearch.SpeedSearchUtil; import com.intellij.ui.tree.AsyncTreeModel; import com.intellij.ui.tree.StructureTreeModel; import com.intellij.ui.tree.treeTable.TreeTableModelWithColumns; import com.intellij.ui.treeStructure.SimpleNode; import com.intellij.ui.treeStructure.SimpleTreeStructure; import com.intellij.ui.treeStructure.treetable.TreeColumnInfo; import com.intellij.ui.treeStructure.treetable.TreeTable; import com.intellij.ui.treeStructure.treetable.TreeTableModel; import com.intellij.ui.treeStructure.treetable.TreeTableTree; import com.intellij.util.EditSourceOnDoubleClickHandler; import com.intellij.util.EditSourceOnEnterKeyHandler; import com.intellij.util.ObjectUtils; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.text.DateFormatUtil; import com.intellij.util.ui.ColumnInfo; import com.intellij.util.ui.UIUtil; import com.intellij.util.ui.tree.TreeUtil; import com.intellij.util.ui.update.MergingUpdateQueue; import com.intellij.util.ui.update.Update; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.CompoundBorder; import javax.swing.table.DefaultTableCellRenderer; import javax.swing.table.TableCellRenderer; import javax.swing.table.TableColumn; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.TreeCellRenderer; import javax.swing.tree.TreePath; import java.awt.*; import java.awt.event.FocusEvent; import java.awt.event.FocusListener; import java.io.File; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; /** * @author Vladislav.Soroka */ public class BuildTreeConsoleView implements ConsoleView, DataProvider, BuildConsoleView { private static final Logger LOG = Logger.getInstance(BuildTreeConsoleView.class); @NonNls private static final String TREE = "tree"; private final JPanel myPanel = new JPanel(); private final Map<Object, ExecutionNode> nodesMap = ContainerUtil.newConcurrentMap(); private final Project myProject; private final DetailsHandler myDetailsHandler; private final TableColumn myTimeColumn; private final String myWorkingDir; private volatile int myTimeColumnWidth; private final AtomicBoolean myDisposed = new 
AtomicBoolean(); private final MergingUpdateQueue myLaterInvocator = new MergingUpdateQueue("BuildTreeConsoleView later invocator", 100, true, null, this); private final StructureTreeModel<SimpleTreeStructure> myTreeModel; private final TreeTableTree myTree; private final ExecutionNode myRootNode; public BuildTreeConsoleView(Project project, BuildDescriptor buildDescriptor) { myProject = project; myWorkingDir = FileUtil.toSystemIndependentName(buildDescriptor.getWorkingDir()); final ColumnInfo[] COLUMNS = { new TreeColumnInfo("name"), new ColumnInfo("time elapsed") { @Nullable @Override public Object valueOf(Object o) { if (o instanceof DefaultMutableTreeNode) { final Object userObject = ((DefaultMutableTreeNode)o).getUserObject(); if (userObject instanceof ExecutionNode) { String duration = ((ExecutionNode)userObject).getDuration(); updateTimeColumnWidth("___" + duration, false); return duration; } } return null; } } }; myRootNode = new ExecutionNode(myProject, null); myRootNode.setAutoExpandNode(true); SimpleTreeStructure treeStructure = new SimpleTreeStructure.Impl(myRootNode); myTreeModel = new StructureTreeModel<>(treeStructure); final TreeTableModel model = new TreeTableModelWithColumns(new AsyncTreeModel(myTreeModel, this), COLUMNS); DefaultTableCellRenderer timeColumnCellRenderer = new DefaultTableCellRenderer() { @Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column); setHorizontalAlignment(SwingConstants.RIGHT); Color fg = isSelected ? UIUtil.getTreeSelectionForeground(hasFocus) : SimpleTextAttributes.GRAY_ATTRIBUTES.getFgColor(); setForeground(fg); return this; } }; TreeTable treeTable = new TreeTable(model) { @Override public TableCellRenderer getCellRenderer(int row, int column) { if (column == 1) { return timeColumnCellRenderer; } return super.getCellRenderer(row, column); } }; EditSourceOnDoubleClickHandler.install(treeTable); EditSourceOnEnterKeyHandler.install(treeTable, null); myTree = treeTable.getTree(); treeTable.addFocusListener(new FocusListener() { @Override public void focusGained(FocusEvent e) { treeTable.setSelectionBackground(UIUtil.getTreeSelectionBackground(true)); } @Override public void focusLost(FocusEvent e) { treeTable.setSelectionBackground(UIUtil.getTreeSelectionBackground(false)); } }); final TreeCellRenderer treeCellRenderer = myTree.getCellRenderer(); myTree.setCellRenderer(new TreeCellRenderer() { @Override public Component getTreeCellRendererComponent(JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) { final Component rendererComponent = treeCellRenderer.getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus); if (rendererComponent instanceof SimpleColoredComponent) { Color bg = UIUtil.getTreeBackground(selected, true); Color fg = UIUtil.getTreeForeground(selected, true); if (selected) { for (SimpleColoredComponent.ColoredIterator it = ((SimpleColoredComponent)rendererComponent).iterator(); it.hasNext(); ) { it.next(); int offset = it.getOffset(); int endOffset = it.getEndOffset(); SimpleTextAttributes currentAttributes = it.getTextAttributes(); SimpleTextAttributes newAttributes = new SimpleTextAttributes(bg, fg, currentAttributes.getWaveColor(), currentAttributes.getStyle()); it.split(endOffset - offset, newAttributes); } } SpeedSearchUtil.applySpeedSearchHighlighting(treeTable, 
(SimpleColoredComponent)rendererComponent, true, selected); } return rendererComponent; } }); new TreeTableSpeedSearch(treeTable).setComparator(new SpeedSearchComparator(false)); treeTable.setTableHeader(null); myTimeColumn = treeTable.getColumnModel().getColumn(1); myTimeColumn.setResizable(false); updateTimeColumnWidth("Running for " + StringUtil.formatDuration(11111L), true); TreeUtil.installActions(myTree); JPanel myContentPanel = new JPanel(); myContentPanel.setLayout(new CardLayout()); myContentPanel.add(ScrollPaneFactory.createScrollPane(treeTable, SideBorder.LEFT), TREE); myPanel.setLayout(new BorderLayout()); ThreeComponentsSplitter myThreeComponentsSplitter = new ThreeComponentsSplitter() { @Override public void doLayout() { super.doLayout(); JComponent detailsComponent = myDetailsHandler.getComponent(); if (detailsComponent != null && detailsComponent.isVisible()) { int firstSize = getFirstSize(); int lastSize = getLastSize(); if (firstSize == 0 && lastSize == 0) { int width = Math.round(getWidth() / 2f); if (width > 0) { setFirstSize(width); } } } } }; Disposer.register(this, myThreeComponentsSplitter); myThreeComponentsSplitter.setFirstComponent(myContentPanel); myDetailsHandler = new DetailsHandler(myProject, myTree, myThreeComponentsSplitter); myThreeComponentsSplitter.setLastComponent(myDetailsHandler.getComponent()); myPanel.add(myThreeComponentsSplitter, BorderLayout.CENTER); } private ExecutionNode getRootElement() { return myRootNode; } @Override public void print(@NotNull String text, @NotNull ConsoleViewContentType contentType) { } @Override public void clear() { getRootElement().removeChildren(); nodesMap.clear(); myDetailsHandler.clear(); myTreeModel.invalidate(); } @Override public void scrollTo(int offset) { } @Override public void attachToProcess(ProcessHandler processHandler) { } @Override public void setOutputPaused(boolean value) { } @Override public boolean isOutputPaused() { return false; } @Override public boolean hasDeferredOutput() { return false; } @Override public void performWhenNoDeferredOutput(@NotNull Runnable runnable) { } @Override public void setHelpId(@NotNull String helpId) { } @Override public void addMessageFilter(@NotNull Filter filter) { } @Override public void printHyperlink(@NotNull String hyperlinkText, @Nullable HyperlinkInfo info) { } @Override public int getContentSize() { return 0; } @Override public boolean canPause() { return false; } @NotNull @Override public AnAction[] createConsoleActions() { return AnAction.EMPTY_ARRAY; } @Override public void allowHeavyFilters() { } @Override public JComponent getComponent() { return myPanel; } @Override public JComponent getPreferredFocusableComponent() { return myTree; } @Override public void dispose() { myDisposed.set(true); } public boolean isDisposed() { return myDisposed.get(); } @Override public void onEvent(@NotNull BuildEvent event) { ExecutionNode parentNode = event.getParentId() == null ? 
null : nodesMap.get(event.getParentId()); ExecutionNode currentNode = nodesMap.get(event.getId()); if (event instanceof StartEvent || event instanceof MessageEvent) { ExecutionNode rootElement = getRootElement(); if (currentNode == null) { if (event instanceof StartBuildEvent) { currentNode = rootElement; } else { if (event instanceof MessageEvent) { MessageEvent messageEvent = (MessageEvent)event; parentNode = createMessageParentNodes(messageEvent, parentNode); } currentNode = new ExecutionNode(myProject, parentNode); } currentNode.setAutoExpandNode(currentNode == rootElement || parentNode == rootElement); nodesMap.put(event.getId(), currentNode); } else { LOG.warn("start event id collision found"); return; } if (parentNode != null) { parentNode.add(currentNode); } if (event instanceof StartBuildEvent) { String buildTitle = ((StartBuildEvent)event).getBuildTitle(); currentNode.setTitle(buildTitle); currentNode.setAutoExpandNode(true); scheduleUpdate(currentNode); } else if (event instanceof MessageEvent) { MessageEvent messageEvent = (MessageEvent)event; currentNode.setStartTime(messageEvent.getEventTime()); currentNode.setEndTime(messageEvent.getEventTime()); currentNode.setNavigatable(messageEvent.getNavigatable(myProject)); final MessageEventResult messageEventResult = messageEvent.getResult(); currentNode.setResult(messageEventResult); } } else { currentNode = nodesMap.get(event.getId()); if (currentNode == null && event instanceof ProgressBuildEvent) { currentNode = new ExecutionNode(myProject, parentNode); nodesMap.put(event.getId(), currentNode); if (parentNode != null) { parentNode.add(currentNode); } } } if (currentNode == null) { // TODO log error return; } currentNode.setName(event.getMessage()); currentNode.setHint(event.getHint()); if (currentNode.getStartTime() == 0) { currentNode.setStartTime(event.getEventTime()); } if (event instanceof FinishEvent) { currentNode.setEndTime(event.getEventTime()); currentNode.setResult(((FinishEvent)event).getResult()); int timeColumnWidth = new JLabel("__" + currentNode.getDuration(), SwingConstants.RIGHT).getPreferredSize().width; if (myTimeColumnWidth < timeColumnWidth) { myTimeColumnWidth = timeColumnWidth; } } else { scheduleUpdate(currentNode); } if (event instanceof FinishBuildEvent) { String aHint = event.getHint(); String time = DateFormatUtil.formatDateTime(event.getEventTime()); aHint = aHint == null ? "at " + time : aHint + " at " + time; currentNode.setHint(aHint); updateTimeColumnWidth(myTimeColumnWidth); if (myDetailsHandler.myExecutionNode == null) { myDetailsHandler.setNode(getRootElement()); } if (((FinishBuildEvent)event).getResult() instanceof FailureResult) { JTree tree = myTree; if (tree != null && !tree.isRootVisible()) { ExecutionNode rootElement = getRootElement(); ExecutionNode resultNode = new ExecutionNode(myProject, rootElement); resultNode.setName(StringUtil.toTitleCase(rootElement.getName())); resultNode.setHint(rootElement.getHint()); resultNode.setEndTime(rootElement.getEndTime()); resultNode.setStartTime(rootElement.getStartTime()); resultNode.setResult(rootElement.getResult()); resultNode.setTooltip(rootElement.getTooltip()); rootElement.add(resultNode); scheduleUpdate(resultNode); } } myTreeModel.invalidate(); } } void scheduleUpdate(ExecutionNode executionNode) { SimpleNode node = executionNode.getParent() == null ? 
executionNode : executionNode.getParent(); final Update update = new Update(node) { @Override public void run() { if (node.isAutoExpandNode() && node.getChildCount() > 0) { myTreeModel.expand(node, myTree, p -> {}); } myTreeModel.invalidate(node, true); } }; myLaterInvocator.queue(update); } private ExecutionNode createMessageParentNodes(MessageEvent messageEvent, ExecutionNode parentNode) { Object messageEventParentId = messageEvent.getParentId(); if (messageEventParentId == null) return null; String group = messageEvent.getGroup(); String groupNodeId = group.hashCode() + messageEventParentId.toString(); ExecutionNode messagesGroupNode = getOrCreateMessagesNode(messageEvent, groupNodeId, parentNode, null, group, true, null, null, nodesMap, myProject); EventResult groupNodeResult = messagesGroupNode.getResult(); final MessageEvent.Kind eventKind = messageEvent.getKind(); if (!(groupNodeResult instanceof MessageEventResult) || ((MessageEventResult)groupNodeResult).getKind().compareTo(eventKind) > 0) { messagesGroupNode.setResult(new MessageEventResult() { @Override public MessageEvent.Kind getKind() { return eventKind; } }); } if (messageEvent instanceof FileMessageEvent) { ExecutionNode fileParentNode = messagesGroupNode; FilePosition filePosition = ((FileMessageEvent)messageEvent).getFilePosition(); String filePath = FileUtil.toSystemIndependentName(filePosition.getFile().getPath()); String parentsPath = ""; String relativePath = FileUtil.getRelativePath(myWorkingDir, filePath, '/'); if (relativePath != null) { String nodeId = groupNodeId + myWorkingDir; ExecutionNode workingDirNode = getOrCreateMessagesNode(messageEvent, nodeId, messagesGroupNode, myWorkingDir, null, false, () -> AllIcons.Nodes.Module, null, nodesMap, myProject); parentsPath = myWorkingDir; fileParentNode = workingDirNode; } VirtualFile sourceRootForFile; VirtualFile ioFile = VfsUtil.findFileByIoFile(new File(filePath), false); if (ioFile != null && (sourceRootForFile = ProjectFileIndex.SERVICE.getInstance(myProject).getSourceRootForFile(ioFile)) != null) { relativePath = FileUtil.getRelativePath(parentsPath, sourceRootForFile.getPath(), '/'); if (relativePath != null) { parentsPath += ("/" + relativePath); String contentRootNodeId = groupNodeId + sourceRootForFile.getPath(); fileParentNode = getOrCreateMessagesNode(messageEvent, contentRootNodeId, fileParentNode, relativePath, null, false, () -> ProjectFileIndex.SERVICE.getInstance(myProject).isInTestSourceContent(ioFile) ? AllIcons.Modules.TestRoot : AllIcons.Modules.SourceRoot, null, nodesMap, myProject); } } String fileNodeId = groupNodeId + filePath; relativePath = StringUtil.isEmpty(parentsPath) ? 
filePath : FileUtil.getRelativePath(parentsPath, filePath, '/'); parentNode = getOrCreateMessagesNode(messageEvent, fileNodeId, fileParentNode, relativePath, null, false, () -> { VirtualFile file = VfsUtil.findFileByIoFile(filePosition.getFile(), false); if (file != null) { return file.getFileType().getIcon(); } return null; }, messageEvent.getNavigatable(myProject), nodesMap, myProject); } else { parentNode = messagesGroupNode; } if (eventKind == MessageEvent.Kind.ERROR || eventKind == MessageEvent.Kind.WARNING) { SimpleNode p = parentNode; do { ((ExecutionNode)p).reportChildMessageKind(eventKind); } while ((p = p.getParent()) instanceof ExecutionNode); } return parentNode; } @NotNull private static ExecutionNode getOrCreateMessagesNode(MessageEvent messageEvent, String nodeId, ExecutionNode parentNode, String nodeName, String nodeTitle, boolean autoExpandNode, @Nullable Supplier<? extends Icon> iconProvider, @Nullable Navigatable navigatable, Map<Object, ExecutionNode> nodesMap, Project project) { ExecutionNode node = nodesMap.get(nodeId); if (node == null) { node = new ExecutionNode(project, parentNode); node.setName(nodeName); node.setTitle(nodeTitle); if (autoExpandNode) { node.setAutoExpandNode(true); } node.setStartTime(messageEvent.getEventTime()); node.setEndTime(messageEvent.getEventTime()); if (iconProvider != null) { node.setIconProvider(iconProvider); } if (navigatable != null) { node.setNavigatable(navigatable); } parentNode.add(node); nodesMap.put(nodeId, node); } return node; } public void hideRootNode() { UIUtil.invokeLaterIfNeeded(() -> { if (myTree != null) { myTree.setRootVisible(false); myTree.setShowsRootHandles(true); } }); } private void updateTimeColumnWidth(String text, boolean force) { int timeColumnWidth = new JLabel(text, SwingConstants.RIGHT).getPreferredSize().width; if (myTimeColumnWidth > timeColumnWidth) { timeColumnWidth = myTimeColumnWidth; } if (force || myTimeColumn.getMaxWidth() < timeColumnWidth || myTimeColumn.getWidth() < timeColumnWidth) { updateTimeColumnWidth(timeColumnWidth); } } private void updateTimeColumnWidth(int width) { myTimeColumn.setPreferredWidth(width); myTimeColumn.setMinWidth(width); myTimeColumn.setMaxWidth(width); } @Nullable @Override public Object getData(@NotNull String dataId) { if (PlatformDataKeys.HELP_ID.is(dataId)) return "reference.build.tool.window"; if (CommonDataKeys.PROJECT.is(dataId)) return myProject; if (CommonDataKeys.NAVIGATABLE_ARRAY.is(dataId)) return extractNavigatables(); return null; } private Object extractNavigatables() { final List<Navigatable> navigatables = new ArrayList<>(); for (ExecutionNode each : getSelectedNodes()) { List<Navigatable> navigatable = each.getNavigatables(); navigatables.addAll(navigatable); } return navigatables.isEmpty() ? 
null : navigatables.toArray(new Navigatable[0]); } private ExecutionNode[] getSelectedNodes() { final ExecutionNode[] result = new ExecutionNode[0]; if (myTree != null) { final List<ExecutionNode> nodes = TreeUtil.collectSelectedObjects(myTree, path -> TreeUtil.getLastUserObject(ExecutionNode.class, path)); return nodes.toArray(result); } return result; } private static class DetailsHandler { private final ThreeComponentsSplitter mySplitter; @Nullable private ExecutionNode myExecutionNode; private final ConsoleView myConsole; private final JPanel myPanel; DetailsHandler(Project project, TreeTableTree tree, ThreeComponentsSplitter threeComponentsSplitter) { myConsole = TextConsoleBuilderFactory.getInstance().createBuilder(project).getConsole(); mySplitter = threeComponentsSplitter; myPanel = new JPanel(new BorderLayout()); JComponent consoleComponent = myConsole.getComponent(); AnAction[] consoleActions = myConsole.createConsoleActions(); consoleComponent.setFocusable(true); final Color editorBackground = EditorColorsManager.getInstance().getGlobalScheme().getDefaultBackground(); consoleComponent.setBorder(new CompoundBorder(IdeBorderFactory.createBorder(SideBorder.RIGHT), new SideBorder(editorBackground, SideBorder.LEFT))); myPanel.add(consoleComponent, BorderLayout.CENTER); final ActionToolbar toolbar = ActionManager.getInstance() .createActionToolbar("BuildResults", new DefaultActionGroup(consoleActions), false); myPanel.add(toolbar.getComponent(), BorderLayout.EAST); myPanel.setVisible(false); tree.addTreeSelectionListener(e -> { TreePath path = e.getPath(); if (path == null || !e.isAddedPath()) { return; } TreePath selectionPath = tree.getSelectionPath(); setNode(selectionPath != null ? (DefaultMutableTreeNode)selectionPath.getLastPathComponent() : null); }); Disposer.register(threeComponentsSplitter, myConsole); } public boolean setNode(@NotNull ExecutionNode node) { EventResult eventResult = node.getResult(); boolean hasChanged = false; if (eventResult instanceof FailureResult) { myConsole.clear(); List<? extends Failure> failures = ((FailureResult)eventResult).getFailures(); if (failures.isEmpty()) return false; for (Iterator<? 
extends Failure> iterator = failures.iterator(); iterator.hasNext(); ) { Failure failure = iterator.next(); String text = ObjectUtils.chooseNotNull(failure.getDescription(), failure.getMessage()); if (text == null && failure.getError() != null) { text = failure.getError().getMessage(); } if (text == null) continue; printDetails(failure, text); hasChanged = true; if (iterator.hasNext()) { myConsole.print("\n\n", ConsoleViewContentType.NORMAL_OUTPUT); } } } else if (eventResult instanceof MessageEventResult) { String details = ((MessageEventResult)eventResult).getDetails(); if (details == null) { return false; } if (details.isEmpty()) { return false; } myConsole.clear(); printDetails(null, details); hasChanged = true; } if (!hasChanged) return false; myConsole.scrollTo(0); int firstSize = mySplitter.getFirstSize(); int lastSize = mySplitter.getLastSize(); if (firstSize == 0 && lastSize == 0) { int width = Math.round(mySplitter.getWidth() / 2f); mySplitter.setFirstSize(width); } myPanel.setVisible(true); return true; } private boolean printDetails(Failure failure, @Nullable String details) { return BuildConsoleUtils.printDetails(myConsole, failure, details); } public void setNode(@Nullable DefaultMutableTreeNode node) { if (node == null || node.getUserObject() == myExecutionNode) return; if (node.getUserObject() instanceof ExecutionNode) { myExecutionNode = (ExecutionNode)node.getUserObject(); if (setNode((ExecutionNode)node.getUserObject())) { return; } } myExecutionNode = null; myPanel.setVisible(false); } public JComponent getComponent() { return myPanel; } public void clear() { myPanel.setVisible(false); myConsole.clear(); } } }
[BTW] expand 2 levels of output (workaround missing autoexpand in async tree)
platform/lang-impl/src/com/intellij/build/BuildTreeConsoleView.java
[BTW] expand 2 levels of output (workaround missing autoexpand in async tree)
<ide><path>latform/lang-impl/src/com/intellij/build/BuildTreeConsoleView.java <ide> final Update update = new Update(node) { <ide> @Override <ide> public void run() { <del> if (node.isAutoExpandNode() && node.getChildCount() > 0) { <del> myTreeModel.expand(node, myTree, p -> {}); <del> } <del> myTreeModel.invalidate(node, true); <add> myTreeModel.invalidate(node, true) <add> .onSuccess(p -> TreeUtil.expand(myTree, 2)); <ide> } <ide> }; <ide> myLaterInvocator.queue(update);
Java
mit
5fc3807716186ac0476d3b8e334828bb569159a0
0
saisrimat96/cs56-m16-lab03
import java.sql.*; import java.util.HashMap; import java.util.ArrayList; import java.util.Map; import java.net.URI; import java.net.URISyntaxException; import static spark.Spark.*; import spark.template.freemarker.FreeMarkerEngine; import spark.ModelAndView; import static spark.Spark.get; import com.heroku.sdk.jdbc.DatabaseUrl; import static javax.measure.unit.SI.KILOGRAM; import javax.measure.quantity.Mass; import org.jscience.physics.model.RelativisticModel; import org.jscience.physics.amount.Amount; public class Main { public static void main(String[] args) { port(Integer.valueOf(System.getenv("PORT"))); staticFileLocation("/public"); get("/hello", (req, res) -> { RelativisticModel.select(); String energy = System.getenv().get("ENERGY"); Amount<Mass> m = Amount.valueOf(energy).to(KILOGRAM); return "E=mc^2: " + energy + " = " + m.toString(); }); get("/ucsb", (req, res) -> "Go Gauchos"); get("/", (request, response) -> { Map<String, Object> attributes = new HashMap<>(); attributes.put("message", "Hello World!"); return new ModelAndView(attributes, "index.ftl"); }, new FreeMarkerEngine()); get("/db", (req, res) -> { Connection connection = null; Map<String, Object> attributes = new HashMap<>(); try { connection = DatabaseUrl.extract().getConnection(); Statement stmt = connection.createStatement(); stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)"); stmt.executeUpdate("INSERT INTO ticks VALUES (now())"); ResultSet rs = stmt.executeQuery("SELECT tick FROM ticks"); ArrayList<String> output = new ArrayList<String>(); while (rs.next()) { output.add( "Read from DB: " + rs.getTimestamp("tick")); } attributes.put("results", output); return new ModelAndView(attributes, "db.ftl"); } catch (Exception e) { attributes.put("message", "There was an error: " + e); return new ModelAndView(attributes, "error.ftl"); } finally { if (connection != null) try{connection.close();} catch(SQLException e){} } }, new FreeMarkerEngine()); } }
src/main/java/Main.java
import java.sql.*; import java.util.HashMap; import java.util.ArrayList; import java.util.Map; import java.net.URI; import java.net.URISyntaxException; import static spark.Spark.*; import spark.template.freemarker.FreeMarkerEngine; import spark.ModelAndView; import static spark.Spark.get; import com.heroku.sdk.jdbc.DatabaseUrl; import static javax.measure.unit.SI.KILOGRAM; import javax.measure.quantity.Mass; import org.jscience.physics.model.RelativisticModel; import org.jscience.physics.amount.Amount; public class Main { public static void main(String[] args) { port(Integer.valueOf(System.getenv("PORT"))); staticFileLocation("/public"); get("/hello", (req, res) -> { RelativisticModel.select(); String energy = System.getenv().get("ENERGY"); Amount<Mass> m = Amount.valueOf(energy).to(KILOGRAM); return "E=mc^2: " + energy + " = " + m.toString(); }); get("/ucsb", (req, res) -> "Go Gauchos"); get("/", (request, response) -> { Map<String, Object> attributes = new HashMap<>(); attributes.put("message", "Hello World!"); return new ModelAndView(attributes, "index.ftl"); }, new FreeMarkerEngine()); get("/db", (req, res) -> { Connection connection = null; Map<String, Object> attributes = new HashMap<>(); try { connection = DatabaseUrl.extract().getConnection(); Statement stmt = connection.createStatement(); stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)"); stmt.executeUpdate("INSERT INTO ticks VALUES (now())"); ResultSet rs = stmt.executeQuery("SELECT tick FROM ticks"); ArrayList<String> output = new ArrayList<String>(); while (rs.next()) { output.add( "Read from DB: " + rs.getTimestamp("tick")); } attributes.put("results", output); return new ModelAndView(attributes, "db.ftl"); } catch (Exception e) { attributes.put("message", "There was an error: " + e); return new ModelAndView(attributes, "error.ftl"); } finally { if (connection != null) try{connection.close();} catch(SQLException e){} } }, new FreeMarkerEngine()); } }
database
src/main/java/Main.java
database
<ide><path>rc/main/java/Main.java <ide> }, new FreeMarkerEngine()); <ide> <ide> get("/db", (req, res) -> { <del> Connection connection = null; <add> Connection connection = null; <ide> Map<String, Object> attributes = new HashMap<>(); <ide> try { <ide> connection = DatabaseUrl.extract().getConnection(); <ide> } <ide> }, new FreeMarkerEngine()); <ide> <add> <ide> } <ide> <ide> }
Java
agpl-3.0
036c41bcf0dda16b96317c33033f76493296054c
0
ozwillo/ozwillo-kernel,ozwillo/ozwillo-kernel,ozwillo/ozwillo-kernel
package oasis.web.userinfo; import java.io.IOException; import java.security.GeneralSecurityException; import java.util.Set; import java.util.concurrent.TimeUnit; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.SecurityContext; import javax.ws.rs.core.UriInfo; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.ISODateTimeFormat; import com.google.api.client.json.JsonFactory; import com.google.api.client.json.webtoken.JsonWebSignature; import com.google.api.client.json.webtoken.JsonWebToken; import com.google.api.client.util.Key; import com.wordnik.swagger.annotations.Api; import com.wordnik.swagger.annotations.ApiOperation; import oasis.model.accounts.AccessToken; import oasis.model.accounts.Account; import oasis.model.accounts.AccountRepository; import oasis.model.accounts.AgentAccount; import oasis.model.accounts.UserAccount; import oasis.model.social.Identity; import oasis.model.social.IdentityRepository; import oasis.openidconnect.OpenIdConnectModule; import oasis.web.authn.Authenticated; import oasis.web.authn.OAuth; import oasis.web.authn.OAuthPrincipal; @Authenticated @OAuth @Path("/a/userinfo") @Api(value = "/a/userinfo", description = "UserInfo Endpoint") public class UserInfoEndpoint { private static final JsonWebSignature.Header JWS_HEADER = new JsonWebSignature.Header().setType("JWS").setAlgorithm("RS256"); private static final DateTimeFormatter BIRTHDATE_FORMATTER = ISODateTimeFormat.date().withDefaultYear(0); private static final String EMAIL_SCOPE = "email"; private static final String PROFILE_SCOPE = "profile"; private static final String PHONE_SCOPE = "phone"; private static final String ADDRESS_SCOPE = "address"; private static final String APPLICATION_JWT = "application/jwt"; @Context UriInfo uriInfo; @Context SecurityContext securityContext; @Inject OpenIdConnectModule.Settings settings; @Inject JsonFactory jsonFactory; @Inject IdentityRepository identityRepository; @Inject AccountRepository accountRepository; @GET @Produces(APPLICATION_JWT) @ApiOperation( value = "Return Claims about the End-User in signed JWT format.", notes = "See the <a href=\"http://openid.net/specs/openid-connect-basic-1_0.html#UserInfo\">OpenID Connect Draft</a>, " + "the <a href=\"http://tools.ietf.org/html/draft-ietf-oauth-json-web-token-08\">JWT Draft</a> " + "and the <a href=\"http://tools.ietf.org/html/draft-ietf-jose-json-web-signature-11\">JWS Draft</a> for more information." ) public Response getSigned() throws GeneralSecurityException, IOException { UserInfo userInfo = getUserInfo(); userInfo.setIssuer(uriInfo.getBaseUri().toString()); AccessToken accessToken = ((OAuthPrincipal) securityContext.getUserPrincipal()).getAccessToken(); userInfo.setAudience(accessToken.getServiceProviderId()); String signedJwt = JsonWebSignature.signUsingRsaSha256( settings.keyPair.getPrivate(), jsonFactory, JWS_HEADER, userInfo ); return Response.ok().entity(signedJwt).build(); } @GET @Produces(MediaType.APPLICATION_JSON) @ApiOperation( value = "Return Claims about the End-User in JSON format.", notes = "See the <a href=\"http://openid.net/specs/openid-connect-basic-1_0.html#UserInfo\">OpenID Connect Draft</a> for more information." 
) public Response getUnsigned() throws IOException { UserInfo userInfo = getUserInfo(); String json = jsonFactory.toString(userInfo); return Response.ok().entity(json).build(); } @POST @Produces("application/jwt") @ApiOperation( value = "Return Claims about the End-User in signed JWT format.", notes = "See the <a href=\"http://openid.net/specs/openid-connect-basic-1_0.html#UserInfo\">OpenID Connect Draft</a>, " + "the <a href=\"http://tools.ietf.org/html/draft-ietf-oauth-json-web-token-08\">JWT Draft</a> " + "and the <a href=\"http://tools.ietf.org/html/draft-ietf-jose-json-web-signature-11\">JWS Draft</a> for more information." ) public Response postSigned() throws GeneralSecurityException, IOException { return getSigned(); } @POST @Produces(MediaType.APPLICATION_JSON) @ApiOperation( value = "Return Claims about the End-User in JSON format.", notes = "See the <a href=\"http://openid.net/specs/openid-connect-basic-1_0.html#UserInfo\">OpenID Connect Draft</a> for more information." ) public Response postUnsigned() throws IOException { return getUnsigned(); } private UserInfo getUserInfo() { OAuthPrincipal oAuthPrincipal = (OAuthPrincipal) securityContext.getUserPrincipal(); Account account = accountRepository.getAccountByTokenId(oAuthPrincipal.getAccessToken().getId()); if (!(account instanceof UserAccount)) { throw invalidTokenResponse(); } UserAccount userAccount = (UserAccount) account; AccessToken accessToken = oAuthPrincipal.getAccessToken(); assert accessToken != null; Set<String> scopeIds = accessToken.getScopeIds(); Identity identity = identityRepository.getIdentity(userAccount.getIdentityId()); UserInfo userInfo = getUserInfo(userAccount, identity, scopeIds); userInfo.setSubject(userAccount.getId()); return userInfo; } private UserInfo getUserInfo(UserAccount userAccount, Identity identity, Set<String> scopeIds) { UserInfo userInfo = new UserInfo(); if (scopeIds.contains(PROFILE_SCOPE)) { String birthDate = identity.getBirthdate() != null ? 
identity.getBirthdate().toString(BIRTHDATE_FORMATTER) : null; userInfo.setName(identity.getName()) .setFamilyName(identity.getFamilyName()) .setGivenName(identity.getGivenName()) .setMiddleName(identity.getMiddleName()) .setNickname(identity.getNickname()) .setPicture(userAccount.getPicture()) .setGender(identity.getGender()) .setBirthdate(birthDate) .setZoneinfo(userAccount.getZoneInfo()) .setLocale(userAccount.getLocale()); } if (scopeIds.contains(EMAIL_SCOPE) && userAccount.getEmailAddress() != null) { userInfo.setEmail(userAccount.getEmailAddress()); userInfo.setEmailVerified(true); // A user account is created only if the email is verified } if (scopeIds.contains(ADDRESS_SCOPE) && identity.getAddress() != null) { UserInfo.Address address = new UserInfo.Address() .setStreetAddress(identity.getAddress().getStreetAddress()) .setLocality(identity.getAddress().getLocality()) .setRegion(identity.getAddress().getRegion()) .setPostalCode(identity.getAddress().getPostalCode()) .setCountry(identity.getAddress().getCountry()); userInfo.setAddress(address); } if (scopeIds.contains(PHONE_SCOPE) && identity.getPhoneNumber() != null) { userInfo.setPhone(identity.getPhoneNumber()); userInfo.setPhoneVerified(identity.isPhoneNumberVerified()); } long updatedAt = Math.max(userAccount.getModified(), identity.getUpdatedAt()); if (updatedAt > 0) { userInfo.setUpdatedAt(TimeUnit.MILLISECONDS.toSeconds(updatedAt)); } if (userAccount instanceof AgentAccount) { AgentAccount agentAccount = (AgentAccount) userAccount; userInfo.setIsAdmin(agentAccount.isAdmin()); userInfo.setOrganizationId(agentAccount.getOrganizationId()); } return userInfo; } private WebApplicationException insufficientScopeResponse() { return errorResponse(Response.Status.FORBIDDEN, "insufficient_scope"); } private WebApplicationException invalidTokenResponse() { return errorResponse(Response.Status.UNAUTHORIZED, "invalid_token"); } private WebApplicationException errorResponse(Response.Status status, String errorCode) { return new WebApplicationException(Response.status(status).header(HttpHeaders.WWW_AUTHENTICATE, "Bearer error=\"" + errorCode + "\"").build()); } private static class UserInfo extends JsonWebToken.Payload { // Profile @Key private String name; @Key private String family_name; @Key private String given_name; @Key private String middle_name; @Key private String nickname; @Key private String picture; @Key private String gender; @Key private String birthdate; @Key private String zoneinfo; @Key private String locale; @Key private Long updated_at; // Email @Key private String email; @Key private Boolean email_verified; // Address @Key private Address address; // Phone @Key private String phone; @Key private Boolean phone_verified; // Agent information @Key private Boolean adm; @Key private String org; public String getName() { return name; } public UserInfo setName(String name) { this.name = name; return this; } public String getFamilyName() { return family_name; } public UserInfo setFamilyName(String familyName) { this.family_name = familyName; return this; } public String getGivenName() { return given_name; } public UserInfo setGivenName(String givenName) { this.given_name = givenName; return this; } public String getMiddleName() { return middle_name; } public UserInfo setMiddleName(String middle_name) { this.middle_name = middle_name; return this; } public String getNickname() { return nickname; } public UserInfo setNickname(String nickname) { this.nickname = nickname; return this; } public String getPicture() { return picture; } 
public UserInfo setPicture(String picture) { this.picture = picture; return this; } public String getGender() { return gender; } public UserInfo setGender(String gender) { this.gender = gender; return this; } public String getBirthdate() { return birthdate; } public UserInfo setBirthdate(String birthdate) { this.birthdate = birthdate; return this; } public String getZoneinfo() { return zoneinfo; } public UserInfo setZoneinfo(String zoneinfo) { this.zoneinfo = zoneinfo; return this; } public String getLocale() { return locale; } public UserInfo setLocale(String locale) { this.locale = locale; return this; } public Long getUpdatedAt() { return updated_at; } public UserInfo setUpdatedAt(Long updated_at) { this.updated_at = updated_at; return this; } public String getEmail() { return email; } public UserInfo setEmail(String email) { this.email = email; return this; } public Boolean isEmailVerified() { return email_verified; } public UserInfo setEmailVerified(Boolean email_verified) { this.email_verified = email_verified; return this; } public Address getAddress() { return address; } public UserInfo setAddress(Address address) { this.address = address; return this; } public String getPhone() { return phone; } public UserInfo setPhone(String phone) { this.phone = phone; return this; } public Boolean isPhoneVerified() { return phone_verified; } public UserInfo setPhoneVerified(Boolean phone_verified) { this.phone_verified = phone_verified; return this; } public Boolean getIsAdmin() { return adm; } public void setIsAdmin(Boolean isAdmin) { this.adm = isAdmin; } public String getOrganizationId() { return org; } public void setOrganizationId(String organizationId) { this.org = organizationId; } @Override public UserInfo setExpirationTimeSeconds(Long expirationTimeSeconds) { super.setExpirationTimeSeconds(expirationTimeSeconds); return this; } @Override public UserInfo setNotBeforeTimeSeconds(Long notBeforeTimeSeconds) { super.setNotBeforeTimeSeconds(notBeforeTimeSeconds); return this; } @Override public UserInfo setIssuedAtTimeSeconds(Long issuedAtTimeSeconds) { super.setIssuedAtTimeSeconds(issuedAtTimeSeconds); return this; } @Override public UserInfo setIssuer(String issuer) { super.setIssuer(issuer); return this; } @Override public UserInfo setAudience(Object audience) { super.setAudience(audience); return this; } @Override public UserInfo setJwtId(String jwtId) { super.setJwtId(jwtId); return this; } @Override public UserInfo setType(String type) { super.setType(type); return this; } @Override public UserInfo setSubject(String subject) { super.setSubject(subject); return this; } @Override public UserInfo set(String fieldName, Object value) { super.set(fieldName, value); return this; } private static class Address { @Key private String street_address; @Key private String locality; @Key private String region; @Key private String postal_code; @Key private String country; public String getStreetAddress() { return street_address; } public Address setStreetAddress(String street_address) { this.street_address = street_address; return this; } public String getLocality() { return locality; } public Address setLocality(String locality) { this.locality = locality; return this; } public String getRegion() { return region; } public Address setRegion(String region) { this.region = region; return this; } public String getPostalCode() { return postal_code; } public Address setPostalCode(String postal_code) { this.postal_code = postal_code; return this; } public String getCountry() { return country; } public 
Address setCountry(String country) { this.country = country; return this; } } } }
oasis-webapp/src/main/java/oasis/web/userinfo/UserInfoEndpoint.java
package oasis.web.userinfo; import java.io.IOException; import java.security.GeneralSecurityException; import java.util.Set; import java.util.concurrent.TimeUnit; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.SecurityContext; import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.ISODateTimeFormat; import com.google.api.client.json.JsonFactory; import com.google.api.client.json.webtoken.JsonWebSignature; import com.google.api.client.json.webtoken.JsonWebToken; import com.google.api.client.util.Key; import com.wordnik.swagger.annotations.Api; import com.wordnik.swagger.annotations.ApiOperation; import oasis.model.accounts.AccessToken; import oasis.model.accounts.Account; import oasis.model.accounts.AccountRepository; import oasis.model.accounts.AgentAccount; import oasis.model.accounts.UserAccount; import oasis.model.social.Identity; import oasis.model.social.IdentityRepository; import oasis.openidconnect.OpenIdConnectModule; import oasis.web.authn.Authenticated; import oasis.web.authn.OAuth; import oasis.web.authn.OAuthPrincipal; @Authenticated @OAuth @Path("/a/userinfo") @Api(value = "/a/userinfo", description = "UserInfo Endpoint") public class UserInfoEndpoint { private static final JsonWebSignature.Header JWS_HEADER = new JsonWebSignature.Header().setType("JWS").setAlgorithm("RS256"); private static final DateTimeFormatter BIRTHDATE_FORMATTER = ISODateTimeFormat.date().withDefaultYear(0); private static final String EMAIL_SCOPE = "email"; private static final String PROFILE_SCOPE = "profile"; private static final String PHONE_SCOPE = "phone"; private static final String ADDRESS_SCOPE = "address"; private static final String APPLICATION_JWT = "application/jwt"; @Context SecurityContext securityContext; @Inject OpenIdConnectModule.Settings settings; @Inject JsonFactory jsonFactory; @Inject IdentityRepository identityRepository; @Inject AccountRepository accountRepository; @GET @Produces(APPLICATION_JWT) @ApiOperation( value = "Return Claims about the End-User in signed JWT format.", notes = "See the <a href=\"http://openid.net/specs/openid-connect-basic-1_0.html#UserInfo\">OpenID Connect Draft</a>, " + "the <a href=\"http://tools.ietf.org/html/draft-ietf-oauth-json-web-token-08\">JWT Draft</a> " + "and the <a href=\"http://tools.ietf.org/html/draft-ietf-jose-json-web-signature-11\">JWS Draft</a> for more information." ) public Response getSigned() throws GeneralSecurityException, IOException { UserInfo userInfo = getUserInfo(); String signedJwt = JsonWebSignature.signUsingRsaSha256( settings.keyPair.getPrivate(), jsonFactory, JWS_HEADER, userInfo ); return Response.ok().entity(signedJwt).build(); } @GET @Produces(MediaType.APPLICATION_JSON) @ApiOperation( value = "Return Claims about the End-User in JSON format.", notes = "See the <a href=\"http://openid.net/specs/openid-connect-basic-1_0.html#UserInfo\">OpenID Connect Draft</a> for more information." 
) public Response getUnsigned() throws IOException { UserInfo userInfo = getUserInfo(); String json = jsonFactory.toString(userInfo); return Response.ok().entity(json).build(); } @POST @Produces("application/jwt") @ApiOperation( value = "Return Claims about the End-User in signed JWT format.", notes = "See the <a href=\"http://openid.net/specs/openid-connect-basic-1_0.html#UserInfo\">OpenID Connect Draft</a>, " + "the <a href=\"http://tools.ietf.org/html/draft-ietf-oauth-json-web-token-08\">JWT Draft</a> " + "and the <a href=\"http://tools.ietf.org/html/draft-ietf-jose-json-web-signature-11\">JWS Draft</a> for more information." ) public Response postSigned() throws GeneralSecurityException, IOException { return getSigned(); } @POST @Produces(MediaType.APPLICATION_JSON) @ApiOperation( value = "Return Claims about the End-User in JSON format.", notes = "See the <a href=\"http://openid.net/specs/openid-connect-basic-1_0.html#UserInfo\">OpenID Connect Draft</a> for more information." ) public Response postUnsigned() throws IOException { return getUnsigned(); } private UserInfo getUserInfo() { OAuthPrincipal oAuthPrincipal = (OAuthPrincipal) securityContext.getUserPrincipal(); Account account = accountRepository.getAccountByTokenId(oAuthPrincipal.getAccessToken().getId()); if (!(account instanceof UserAccount)) { throw invalidTokenResponse(); } UserAccount userAccount = (UserAccount) account; AccessToken accessToken = oAuthPrincipal.getAccessToken(); assert accessToken != null; Set<String> scopeIds = accessToken.getScopeIds(); Identity identity = identityRepository.getIdentity(userAccount.getIdentityId()); UserInfo userInfo = getUserInfo(userAccount, identity, scopeIds); userInfo.setSubject(userAccount.getId()); return userInfo; } private UserInfo getUserInfo(UserAccount userAccount, Identity identity, Set<String> scopeIds) { UserInfo userInfo = new UserInfo(); if (scopeIds.contains(PROFILE_SCOPE)) { String birthDate = identity.getBirthdate() != null ? 
identity.getBirthdate().toString(BIRTHDATE_FORMATTER) : null; userInfo.setName(identity.getName()) .setFamilyName(identity.getFamilyName()) .setGivenName(identity.getGivenName()) .setMiddleName(identity.getMiddleName()) .setNickname(identity.getNickname()) .setPicture(userAccount.getPicture()) .setGender(identity.getGender()) .setBirthdate(birthDate) .setZoneinfo(userAccount.getZoneInfo()) .setLocale(userAccount.getLocale()); } if (scopeIds.contains(EMAIL_SCOPE) && userAccount.getEmailAddress() != null) { userInfo.setEmail(userAccount.getEmailAddress()); userInfo.setEmailVerified(true); // A user account is created only if the email is verified } if (scopeIds.contains(ADDRESS_SCOPE) && identity.getAddress() != null) { UserInfo.Address address = new UserInfo.Address() .setStreetAddress(identity.getAddress().getStreetAddress()) .setLocality(identity.getAddress().getLocality()) .setRegion(identity.getAddress().getRegion()) .setPostalCode(identity.getAddress().getPostalCode()) .setCountry(identity.getAddress().getCountry()); userInfo.setAddress(address); } if (scopeIds.contains(PHONE_SCOPE) && identity.getPhoneNumber() != null) { userInfo.setPhone(identity.getPhoneNumber()); userInfo.setPhoneVerified(identity.isPhoneNumberVerified()); } long updatedAt = Math.max(userAccount.getModified(), identity.getUpdatedAt()); if (updatedAt > 0) { userInfo.setUpdatedAt(TimeUnit.MILLISECONDS.toSeconds(updatedAt)); } if (userAccount instanceof AgentAccount) { AgentAccount agentAccount = (AgentAccount) userAccount; userInfo.setIsAdmin(agentAccount.isAdmin()); userInfo.setOrganizationId(agentAccount.getOrganizationId()); } return userInfo; } private WebApplicationException insufficientScopeResponse() { return errorResponse(Response.Status.FORBIDDEN, "insufficient_scope"); } private WebApplicationException invalidTokenResponse() { return errorResponse(Response.Status.UNAUTHORIZED, "invalid_token"); } private WebApplicationException errorResponse(Response.Status status, String errorCode) { return new WebApplicationException(Response.status(status).header(HttpHeaders.WWW_AUTHENTICATE, "Bearer error=\"" + errorCode + "\"").build()); } private static class UserInfo extends JsonWebToken.Payload { // Profile @Key private String name; @Key private String family_name; @Key private String given_name; @Key private String middle_name; @Key private String nickname; @Key private String picture; @Key private String gender; @Key private String birthdate; @Key private String zoneinfo; @Key private String locale; @Key private Long updated_at; // Email @Key private String email; @Key private Boolean email_verified; // Address @Key private Address address; // Phone @Key private String phone; @Key private Boolean phone_verified; // Agent information @Key private Boolean adm; @Key private String org; public String getName() { return name; } public UserInfo setName(String name) { this.name = name; return this; } public String getFamilyName() { return family_name; } public UserInfo setFamilyName(String familyName) { this.family_name = familyName; return this; } public String getGivenName() { return given_name; } public UserInfo setGivenName(String givenName) { this.given_name = givenName; return this; } public String getMiddleName() { return middle_name; } public UserInfo setMiddleName(String middle_name) { this.middle_name = middle_name; return this; } public String getNickname() { return nickname; } public UserInfo setNickname(String nickname) { this.nickname = nickname; return this; } public String getPicture() { return picture; } 
public UserInfo setPicture(String picture) { this.picture = picture; return this; } public String getGender() { return gender; } public UserInfo setGender(String gender) { this.gender = gender; return this; } public String getBirthdate() { return birthdate; } public UserInfo setBirthdate(String birthdate) { this.birthdate = birthdate; return this; } public String getZoneinfo() { return zoneinfo; } public UserInfo setZoneinfo(String zoneinfo) { this.zoneinfo = zoneinfo; return this; } public String getLocale() { return locale; } public UserInfo setLocale(String locale) { this.locale = locale; return this; } public Long getUpdatedAt() { return updated_at; } public UserInfo setUpdatedAt(Long updated_at) { this.updated_at = updated_at; return this; } public String getEmail() { return email; } public UserInfo setEmail(String email) { this.email = email; return this; } public Boolean isEmailVerified() { return email_verified; } public UserInfo setEmailVerified(Boolean email_verified) { this.email_verified = email_verified; return this; } public Address getAddress() { return address; } public UserInfo setAddress(Address address) { this.address = address; return this; } public String getPhone() { return phone; } public UserInfo setPhone(String phone) { this.phone = phone; return this; } public Boolean isPhoneVerified() { return phone_verified; } public UserInfo setPhoneVerified(Boolean phone_verified) { this.phone_verified = phone_verified; return this; } public Boolean getIsAdmin() { return adm; } public void setIsAdmin(Boolean isAdmin) { this.adm = isAdmin; } public String getOrganizationId() { return org; } public void setOrganizationId(String organizationId) { this.org = organizationId; } @Override public UserInfo setExpirationTimeSeconds(Long expirationTimeSeconds) { super.setExpirationTimeSeconds(expirationTimeSeconds); return this; } @Override public UserInfo setNotBeforeTimeSeconds(Long notBeforeTimeSeconds) { super.setNotBeforeTimeSeconds(notBeforeTimeSeconds); return this; } @Override public UserInfo setIssuedAtTimeSeconds(Long issuedAtTimeSeconds) { super.setIssuedAtTimeSeconds(issuedAtTimeSeconds); return this; } @Override public UserInfo setIssuer(String issuer) { super.setIssuer(issuer); return this; } @Override public UserInfo setAudience(Object audience) { super.setAudience(audience); return this; } @Override public UserInfo setJwtId(String jwtId) { super.setJwtId(jwtId); return this; } @Override public UserInfo setType(String type) { super.setType(type); return this; } @Override public UserInfo setSubject(String subject) { super.setSubject(subject); return this; } @Override public UserInfo set(String fieldName, Object value) { super.set(fieldName, value); return this; } private static class Address { @Key private String street_address; @Key private String locality; @Key private String region; @Key private String postal_code; @Key private String country; public String getStreetAddress() { return street_address; } public Address setStreetAddress(String street_address) { this.street_address = street_address; return this; } public String getLocality() { return locality; } public Address setLocality(String locality) { this.locality = locality; return this; } public String getRegion() { return region; } public Address setRegion(String region) { this.region = region; return this; } public String getPostalCode() { return postal_code; } public Address setPostalCode(String postal_code) { this.postal_code = postal_code; return this; } public String getCountry() { return country; } public 
Address setCountry(String country) { this.country = country; return this; } } } }
Add Issuer and Audience to the response of the UserInfoEndpoint if signed. Change-Id: Ibf0f2027aa44a5785f0ef6b2884a7ae42ecdfedb
oasis-webapp/src/main/java/oasis/web/userinfo/UserInfoEndpoint.java
Add Issuer and Audience to the response of the UserInfoEndpoint if signed.
<ide><path>asis-webapp/src/main/java/oasis/web/userinfo/UserInfoEndpoint.java <ide> import javax.ws.rs.core.MediaType; <ide> import javax.ws.rs.core.Response; <ide> import javax.ws.rs.core.SecurityContext; <add>import javax.ws.rs.core.UriInfo; <ide> <ide> import org.joda.time.format.DateTimeFormatter; <ide> import org.joda.time.format.ISODateTimeFormat; <ide> private static final String ADDRESS_SCOPE = "address"; <ide> private static final String APPLICATION_JWT = "application/jwt"; <ide> <add> @Context UriInfo uriInfo; <ide> @Context SecurityContext securityContext; <ide> @Inject OpenIdConnectModule.Settings settings; <ide> @Inject JsonFactory jsonFactory; <ide> ) <ide> public Response getSigned() throws GeneralSecurityException, IOException { <ide> UserInfo userInfo = getUserInfo(); <add> userInfo.setIssuer(uriInfo.getBaseUri().toString()); <add> AccessToken accessToken = ((OAuthPrincipal) securityContext.getUserPrincipal()).getAccessToken(); <add> userInfo.setAudience(accessToken.getServiceProviderId()); <ide> <ide> String signedJwt = JsonWebSignature.signUsingRsaSha256( <ide> settings.keyPair.getPrivate(),
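A minimal sketch of the change recorded above, assuming the google-http-client JsonWebToken and JsonWebSignature API that the diff already uses: it sets the "iss" and "aud" claims on a payload before RSA-signing it. The names issuerBaseUri and audienceClientId are placeholders for whatever the endpoint actually derives from UriInfo and the access token; they are not the project's own identifiers.

import java.io.IOException;
import java.security.GeneralSecurityException;
import java.security.PrivateKey;

import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.json.webtoken.JsonWebSignature;
import com.google.api.client.json.webtoken.JsonWebToken;

public class SignedUserInfoSketch {
    // Sign a payload that carries the two claims the commit adds: "iss" and "aud".
    public static String sign(PrivateKey signingKey, String issuerBaseUri, String audienceClientId)
            throws GeneralSecurityException, IOException {
        JsonFactory jsonFactory = new JacksonFactory();
        JsonWebSignature.Header header = new JsonWebSignature.Header().setAlgorithm("RS256");
        JsonWebToken.Payload payload = new JsonWebToken.Payload()
                .setIssuer(issuerBaseUri)        // "iss": the OpenID provider's base URI
                .setAudience(audienceClientId);  // "aud": the service provider / client id
        return JsonWebSignature.signUsingRsaSha256(signingKey, jsonFactory, header, payload);
    }
}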
Java
apache-2.0
2a1b8d1935bba52d3beb861baecd533a1770b5c6
0
jsonking/mongo-java-driver,gianpaj/mongo-java-driver,jsonking/mongo-java-driver,jyemin/mongo-java-driver,rozza/mongo-java-driver,PSCGroup/mongo-java-driver,rozza/mongo-java-driver,jyemin/mongo-java-driver,kay-kim/mongo-java-driver
/* * Copyright (c) 2008 - 2013 10gen, Inc. <http://10gen.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mongodb.connection; import org.mongodb.annotations.Immutable; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * Immutable snapshot state of a cluster. */ @Immutable public class ClusterDescription { private final List<ServerDescription> all; private final int acceptableLatencyMS; public ClusterDescription(final List<ServerDescription> all, final int acceptableLatencyMS) { this.all = Collections.unmodifiableList(new ArrayList<ServerDescription>(all)); this.acceptableLatencyMS = acceptableLatencyMS; } public List<ServerDescription> getAll() { return all; } /** * While it may seem counter-intuitive that a MongoDb cluster can have more than one primary, * it can in the case where the client's view of the cluster is a set of mongos servers, any of which can serve as the primary. * * @return a list of servers that can act as primaries\ */ public List<ServerDescription> getPrimaries() { List<ServerDescription> primaries = getAllGoodPrimaries(all); return getAllServersWithAcceptableLatency(primaries, calculateBestPingTime(primaries), acceptableLatencyMS); } public List<ServerDescription> getSecondaries() { List<ServerDescription> secondaries = getAllGoodSecondaries(all); return getAllServersWithAcceptableLatency(secondaries, calculateBestPingTime(secondaries), acceptableLatencyMS); } public List<ServerDescription> getSecondaries(final Tags tags) { List<ServerDescription> taggedServers = getServersByTags(all, tags); List<ServerDescription> taggedSecondaries = getAllGoodSecondaries(taggedServers); return getAllServersWithAcceptableLatency(taggedSecondaries, calculateBestPingTime(taggedSecondaries), acceptableLatencyMS); } public List<ServerDescription> getAny() { List<ServerDescription> any = getAllGoodServers(all); return getAllServersWithAcceptableLatency(any, calculateBestPingTime(any), acceptableLatencyMS); } public List<ServerDescription> getAny(final Tags tags) { List<ServerDescription> taggedServers = getServersByTags(all, tags); List<ServerDescription> taggedAny = getAllGoodServers(taggedServers); return getAllServersWithAcceptableLatency(taggedAny, calculateBestPingTime(taggedAny), acceptableLatencyMS); } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("[ "); for (final ServerDescription node : getAll()) { sb.append(node).append(","); } sb.setLength(sb.length() - 1); //remove last comma sb.append(" ]"); return sb.toString(); } static float calculateBestPingTime(final List<ServerDescription> members) { float bestPingTime = Float.MAX_VALUE; for (final ServerDescription cur : members) { if (!cur.isSecondary()) { continue; } if (cur.getAveragePingTimeMillis() < bestPingTime) { bestPingTime = cur.getAveragePingTimeMillis(); } } return bestPingTime; } static List<ServerDescription> getAllGoodPrimaries(final List<ServerDescription> servers) { final List<ServerDescription> goodPrimaries = new 
ArrayList<ServerDescription>(servers.size()); for (final ServerDescription cur : servers) { if (cur.isPrimary()) { goodPrimaries.add(cur); } } return goodPrimaries; } static List<ServerDescription> getAllGoodServers(final List<ServerDescription> servers) { final List<ServerDescription> goodSecondaries = new ArrayList<ServerDescription>(servers.size()); for (final ServerDescription cur : servers) { if (cur.isOk()) { goodSecondaries.add(cur); } } return goodSecondaries; } static List<ServerDescription> getAllGoodSecondaries(final List<ServerDescription> server) { final List<ServerDescription> goodSecondaries = new ArrayList<ServerDescription>(server.size()); for (final ServerDescription cur : server) { if (cur.isSecondary()) { goodSecondaries.add(cur); } } return goodSecondaries; } static List<ServerDescription> getAllServersWithAcceptableLatency(final List<ServerDescription> servers, final float bestPingTime, final int acceptableLatencyMS) { final List<ServerDescription> goodSecondaries = new ArrayList<ServerDescription>(servers.size()); for (final ServerDescription cur : servers) { if (cur.getAveragePingTimeMillis() - acceptableLatencyMS <= bestPingTime) { goodSecondaries.add(cur); } } return goodSecondaries; } static List<ServerDescription> getServersByTags(final List<ServerDescription> servers, final Tags tags) { final List<ServerDescription> membersByTag = new ArrayList<ServerDescription>(); for (final ServerDescription cur : servers) { if (cur.hasTags(tags)) { membersByTag.add(cur); } } return membersByTag; } }
driver/src/main/org/mongodb/connection/ClusterDescription.java
/* * Copyright (c) 2008 - 2013 10gen, Inc. <http://10gen.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mongodb.connection; import org.mongodb.annotations.Immutable; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * Immutable snapshot state of a cluster. */ @Immutable public class ClusterDescription { private final List<ServerDescription> all; private final List<ServerDescription> goodPrimaries; private final List<ServerDescription> goodSecondaries; private final List<ServerDescription> goodMembers; private final int acceptableLatencyMS; public ClusterDescription(final List<ServerDescription> serverDescriptions, final int acceptableLatencyMS) { this.all = Collections.unmodifiableList(new ArrayList<ServerDescription>(serverDescriptions)); this.acceptableLatencyMS = acceptableLatencyMS; this.goodPrimaries = Collections.unmodifiableList(calculateGoodPrimaries(all, calculateBestPingTime(all), acceptableLatencyMS)); this.goodSecondaries = Collections.unmodifiableList(calculateGoodSecondaries(all, calculateBestPingTime(all), acceptableLatencyMS)); this.goodMembers = Collections.unmodifiableList(calculateGoodMembers(all, calculateBestPingTime(all), acceptableLatencyMS)); } public List<ServerDescription> getAll() { return all; } /** * While it may seem counter-intuitive that a MongoDb cluster can have more than one primary, * it can in the case where the client's view of the cluster is a set of mongos servers, any of which can serve as the primary. 
* * @return a list of servers that can act as primaries\ */ public List<ServerDescription> getPrimaries() { return goodPrimaries; } public List<ServerDescription> getSecondaries() { return goodSecondaries; } public List<ServerDescription> getSecondaries(final Tags tags) { List<ServerDescription> taggedSecondaries = getMembersByTags(all, tags); return calculateGoodSecondaries(taggedSecondaries, calculateBestPingTime(taggedSecondaries), acceptableLatencyMS); } public List<ServerDescription> getAny() { return goodMembers; } public List<ServerDescription> getAny(final Tags tags) { List<ServerDescription> taggedMembers = getMembersByTags(all, tags); return calculateGoodMembers(taggedMembers, calculateBestPingTime(taggedMembers), acceptableLatencyMS); } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append("[ "); for (final ServerDescription node : getAll()) { sb.append(node).append(","); } sb.setLength(sb.length() - 1); //remove last comma sb.append(" ]"); return sb.toString(); } static float calculateBestPingTime(final List<ServerDescription> members) { float bestPingTime = Float.MAX_VALUE; for (final ServerDescription cur : members) { if (!cur.isSecondary()) { continue; } if (cur.getAveragePingTimeMillis() < bestPingTime) { bestPingTime = cur.getAveragePingTimeMillis(); } } return bestPingTime; } static List<ServerDescription> calculateGoodPrimaries(final List<ServerDescription> members, final float bestPingTime, final int acceptableLatencyMS) { final List<ServerDescription> goodPrimaries = new ArrayList<ServerDescription>(members.size()); for (final ServerDescription cur : members) { if (!cur.isPrimary()) { continue; } if (cur.getAveragePingTimeMillis() - acceptableLatencyMS <= bestPingTime) { goodPrimaries.add(cur); } } return goodPrimaries; } static List<ServerDescription> calculateGoodMembers(final List<ServerDescription> members, final float bestPingTime, final int acceptableLatencyMS) { final List<ServerDescription> goodSecondaries = new ArrayList<ServerDescription>(members.size()); for (final ServerDescription cur : members) { if (!cur.isOk()) { continue; } if (cur.getAveragePingTimeMillis() - acceptableLatencyMS <= bestPingTime) { goodSecondaries.add(cur); } } return goodSecondaries; } static List<ServerDescription> calculateGoodSecondaries(final List<ServerDescription> members, final float bestPingTime, final int acceptableLatencyMS) { final List<ServerDescription> goodSecondaries = new ArrayList<ServerDescription>(members.size()); for (final ServerDescription cur : members) { if (!cur.isSecondary()) { continue; } if (cur.getAveragePingTimeMillis() - acceptableLatencyMS <= bestPingTime) { goodSecondaries.add(cur); } } return goodSecondaries; } static List<ServerDescription> getMembersByTags(final List<ServerDescription> members, final Tags tags) { final List<ServerDescription> membersByTag = new ArrayList<ServerDescription>(); for (final ServerDescription cur : members) { if (cur.hasTags(tags)) { membersByTag.add(cur); } } return membersByTag; } }
Refactored ClusterDescription in preparation for moving acceptable latency test out.
driver/src/main/org/mongodb/connection/ClusterDescription.java
Refactored ClusterDescription in preparation for moving acceptable latency test out.
<ide><path>river/src/main/org/mongodb/connection/ClusterDescription.java <ide> @Immutable <ide> public class ClusterDescription { <ide> private final List<ServerDescription> all; <del> private final List<ServerDescription> goodPrimaries; <del> private final List<ServerDescription> goodSecondaries; <del> private final List<ServerDescription> goodMembers; <ide> <ide> private final int acceptableLatencyMS; <ide> <del> public ClusterDescription(final List<ServerDescription> serverDescriptions, final int acceptableLatencyMS) { <del> <del> this.all = Collections.unmodifiableList(new ArrayList<ServerDescription>(serverDescriptions)); <add> public ClusterDescription(final List<ServerDescription> all, final int acceptableLatencyMS) { <add> this.all = Collections.unmodifiableList(new ArrayList<ServerDescription>(all)); <ide> this.acceptableLatencyMS = acceptableLatencyMS; <del> <del> this.goodPrimaries = Collections.unmodifiableList(calculateGoodPrimaries(all, calculateBestPingTime(all), acceptableLatencyMS)); <del> this.goodSecondaries = Collections.unmodifiableList(calculateGoodSecondaries(all, calculateBestPingTime(all), acceptableLatencyMS)); <del> this.goodMembers = Collections.unmodifiableList(calculateGoodMembers(all, calculateBestPingTime(all), acceptableLatencyMS)); <ide> } <ide> <ide> public List<ServerDescription> getAll() { <ide> * @return a list of servers that can act as primaries\ <ide> */ <ide> public List<ServerDescription> getPrimaries() { <del> return goodPrimaries; <add> List<ServerDescription> primaries = getAllGoodPrimaries(all); <add> return getAllServersWithAcceptableLatency(primaries, calculateBestPingTime(primaries), acceptableLatencyMS); <ide> } <ide> <ide> public List<ServerDescription> getSecondaries() { <del> return goodSecondaries; <add> List<ServerDescription> secondaries = getAllGoodSecondaries(all); <add> return getAllServersWithAcceptableLatency(secondaries, calculateBestPingTime(secondaries), acceptableLatencyMS); <ide> } <ide> <ide> public List<ServerDescription> getSecondaries(final Tags tags) { <del> List<ServerDescription> taggedSecondaries = getMembersByTags(all, tags); <del> return calculateGoodSecondaries(taggedSecondaries, calculateBestPingTime(taggedSecondaries), acceptableLatencyMS); <add> List<ServerDescription> taggedServers = getServersByTags(all, tags); <add> List<ServerDescription> taggedSecondaries = getAllGoodSecondaries(taggedServers); <add> return getAllServersWithAcceptableLatency(taggedSecondaries, calculateBestPingTime(taggedSecondaries), acceptableLatencyMS); <ide> } <ide> <ide> public List<ServerDescription> getAny() { <del> return goodMembers; <add> List<ServerDescription> any = getAllGoodServers(all); <add> return getAllServersWithAcceptableLatency(any, calculateBestPingTime(any), acceptableLatencyMS); <ide> } <ide> <ide> public List<ServerDescription> getAny(final Tags tags) { <del> List<ServerDescription> taggedMembers = getMembersByTags(all, tags); <del> return calculateGoodMembers(taggedMembers, calculateBestPingTime(taggedMembers), acceptableLatencyMS); <add> List<ServerDescription> taggedServers = getServersByTags(all, tags); <add> List<ServerDescription> taggedAny = getAllGoodServers(taggedServers); <add> return getAllServersWithAcceptableLatency(taggedAny, calculateBestPingTime(taggedAny), acceptableLatencyMS); <ide> } <ide> <ide> @Override <ide> return bestPingTime; <ide> } <ide> <del> static List<ServerDescription> calculateGoodPrimaries(final List<ServerDescription> members, <del> final float bestPingTime, final int 
acceptableLatencyMS) { <del> final List<ServerDescription> goodPrimaries = new ArrayList<ServerDescription>(members.size()); <del> for (final ServerDescription cur : members) { <del> if (!cur.isPrimary()) { <del> continue; <del> } <del> if (cur.getAveragePingTimeMillis() - acceptableLatencyMS <= bestPingTime) { <add> static List<ServerDescription> getAllGoodPrimaries(final List<ServerDescription> servers) { <add> final List<ServerDescription> goodPrimaries = new ArrayList<ServerDescription>(servers.size()); <add> for (final ServerDescription cur : servers) { <add> if (cur.isPrimary()) { <ide> goodPrimaries.add(cur); <ide> } <ide> } <ide> return goodPrimaries; <ide> } <ide> <del> static List<ServerDescription> calculateGoodMembers(final List<ServerDescription> members, final float bestPingTime, <del> final int acceptableLatencyMS) { <del> final List<ServerDescription> goodSecondaries = new ArrayList<ServerDescription>(members.size()); <del> for (final ServerDescription cur : members) { <del> if (!cur.isOk()) { <del> continue; <add> static List<ServerDescription> getAllGoodServers(final List<ServerDescription> servers) { <add> final List<ServerDescription> goodSecondaries = new ArrayList<ServerDescription>(servers.size()); <add> for (final ServerDescription cur : servers) { <add> if (cur.isOk()) { <add> goodSecondaries.add(cur); <ide> } <add> } <add> return goodSecondaries; <add> } <add> <add> static List<ServerDescription> getAllGoodSecondaries(final List<ServerDescription> server) { <add> final List<ServerDescription> goodSecondaries = new ArrayList<ServerDescription>(server.size()); <add> for (final ServerDescription cur : server) { <add> if (cur.isSecondary()) { <add> goodSecondaries.add(cur); <add> } <add> } <add> return goodSecondaries; <add> } <add> <add> static List<ServerDescription> getAllServersWithAcceptableLatency(final List<ServerDescription> servers, <add> final float bestPingTime, final int acceptableLatencyMS) { <add> final List<ServerDescription> goodSecondaries = new ArrayList<ServerDescription>(servers.size()); <add> for (final ServerDescription cur : servers) { <ide> if (cur.getAveragePingTimeMillis() - acceptableLatencyMS <= bestPingTime) { <ide> goodSecondaries.add(cur); <ide> } <ide> return goodSecondaries; <ide> } <ide> <del> static List<ServerDescription> calculateGoodSecondaries(final List<ServerDescription> members, <del> final float bestPingTime, final int acceptableLatencyMS) { <del> final List<ServerDescription> goodSecondaries = new ArrayList<ServerDescription>(members.size()); <del> for (final ServerDescription cur : members) { <del> if (!cur.isSecondary()) { <del> continue; <del> } <del> if (cur.getAveragePingTimeMillis() - acceptableLatencyMS <= bestPingTime) { <del> goodSecondaries.add(cur); <del> } <del> } <del> return goodSecondaries; <del> } <del> <del> static List<ServerDescription> getMembersByTags(final List<ServerDescription> members, final Tags tags) { <add> static List<ServerDescription> getServersByTags(final List<ServerDescription> servers, final Tags tags) { <ide> <ide> final List<ServerDescription> membersByTag = new ArrayList<ServerDescription>(); <ide> <del> for (final ServerDescription cur : members) { <add> for (final ServerDescription cur : servers) { <ide> if (cur.hasTags(tags)) { <ide> membersByTag.add(cur); <ide> }
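A minimal sketch of the acceptable-latency window that this refactor isolates: a server is kept when its average ping time is within acceptableLatencyMS of the best observed ping time. The Server stand-in below is an assumption for illustration only; it is not the driver's ServerDescription class.

import java.util.ArrayList;
import java.util.List;

final class LatencyWindowSketch {
    // Minimal stand-in for a server description; only the ping time matters here.
    static final class Server {
        final float averagePingTimeMillis;
        Server(float averagePingTimeMillis) { this.averagePingTimeMillis = averagePingTimeMillis; }
    }

    // Keep every server whose ping time is within acceptableLatencyMS of the best ping time.
    static List<Server> withAcceptableLatency(List<Server> servers, float bestPingTime, int acceptableLatencyMS) {
        List<Server> good = new ArrayList<Server>(servers.size());
        for (Server cur : servers) {
            if (cur.averagePingTimeMillis - acceptableLatencyMS <= bestPingTime) {
                good.add(cur);
            }
        }
        return good;
    }
}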
Java
unlicense
f3c554c5693e7a0c99348ec734d5492fa5e1b010
0
sebastianv89/hwsec
package terminal; public class Card { int id; int customerID; String customerName; short kilometers; long expiration; String expString; int revocation; String cardPublicKey; byte[] cardModulus; byte[] sessionKey; public Card(){ } public Card(int ID, int custID, String custName, short km, long exp, int revoke, String cardPK){ this.id = ID; this.customerID = custID; this.customerName = custName; this.kilometers = km; this.expiration = exp; this.revocation = revoke; this.cardPublicKey = cardPK; } public void setKilometers(short kmNew){ this.kilometers = kmNew; } public void setExpiration(long expNew){ this.expiration = expNew; } public void setStringExpiration(String strExpNew){ this.expString = strExpNew; } public void setCardModulus(byte[] cardByteModulus){ this.cardModulus = cardByteModulus; } public void setSessionKey(byte[] sessionkey){ this.sessionKey = sessionkey; } public byte[] getSessionKey(){ return this.sessionKey; } public int getID(){ return this.id; } public int getCustomerID(){ return this.customerID; } public String getCustomerName(){ return this.customerName; } public int getKilometers(){ return this.kilometers; } public String getStringKilometers(){ return this.expString; } public long getExpDate(){ return this.expiration; } public int getRevocationStates(){ return this.revocation; } public String getCardPublicKey(){ return this.cardPublicKey; } public byte[] getCardModulus(){ return this.cardModulus; } }
Backend/src/terminal/Card.java
package terminal; public class Card { int id; int customerID; String customerName; short kilometers; long expiration; String expString; int revocation; String cardPublicKey; byte[] cardModulus; public Card(){ } public Card(int ID, int custID, String custName, short km, long exp, int revoke, String cardPK){ this.id = ID; this.customerID = custID; this.customerName = custName; this.kilometers = km; this.expiration = exp; this.revocation = revoke; this.cardPublicKey = cardPK; } public void setKilometers(short kmNew){ this.kilometers = kmNew; } public void setExpiration(long expNew){ this.expiration = expNew; } public void setStringExpiration(String strExpNew){ this.expString = strExpNew; } public void setCardModulus(byte[] cardByteModulus){ this.cardModulus = cardByteModulus; } public int getID(){ return this.id; } public int getCustomerID(){ return this.customerID; } public String getCustomerName(){ return this.customerName; } public int getKilometers(){ return this.kilometers; } public String getStringKilometers(){ return this.expString; } public long getExpDate(){ return this.expiration; } public int getRevocationStates(){ return this.revocation; } public String getCardPublicKey(){ return this.cardPublicKey; } public byte[] getCardModulus(){ return this.cardModulus; } }
add sessionKey
Backend/src/terminal/Card.java
add sessionKey
<ide><path>Backend/src/terminal/Card.java <ide> String cardPublicKey; <ide> <ide> byte[] cardModulus; <add> byte[] sessionKey; <ide> <ide> <ide> public Card(){ <ide> this.cardModulus = cardByteModulus; <ide> } <ide> <add> public void setSessionKey(byte[] sessionkey){ <add> this.sessionKey = sessionkey; <add> } <add> <add> public byte[] getSessionKey(){ <add> return this.sessionKey; <add> } <ide> <ide> public int getID(){ <ide> return this.id;
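A short usage sketch for the sessionKey field added in the record above, assuming only the Card API shown there (no-arg constructor, setSessionKey, getSessionKey); the key bytes are placeholders for whatever key the terminal and card actually agree on.

import terminal.Card;

public class SessionKeySketch {
    public static void main(String[] args) {
        Card card = new Card();
        byte[] sessionKey = new byte[16]; // placeholder for a 128-bit key established during mutual authentication
        card.setSessionKey(sessionKey);
        // The card object now carries the key for later secure-messaging calls.
        System.out.println(card.getSessionKey().length + " key bytes stored");
    }
}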
Java
bsd-3-clause
95c34f1dff7fa0ab4aacf30f171e75d389bae33e
0
CBIIT/caaers,NCIP/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers,NCIP/caaers
package gov.nih.nci.cabig.caaers.web.selenium; import java.io.IOException; import java.net.URL; import java.net.URLClassLoader; import java.util.Properties; import javax.naming.NamingException; import org.apache.log4j.Logger; import org.apache.tools.ant.AntClassLoader; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.mock.jndi.SimpleNamingContextBuilder; import com.thoughtworks.selenium.DefaultSelenium; import com.thoughtworks.selenium.SeleneseTestCase; import org.springframework.core.io.*; public class CaaersSeleniumTestCase extends SeleneseTestCase { String studyId = null; AjaxWidgets aw; public final String BLAZING = "0"; public final String FAST = "1000"; public final String MEDIUM = "5000"; public final String SLOW = "10000"; private Logger log = Logger.getLogger(CaaersSeleniumTestCase.class); private RuntimeException acLoadFailure = null; private ApplicationContext applicationContext = null; String seleniumServerURL = null; String seleniumServerPort = null; String seleniumBrowser = null; String caaersURL = null; String seleniumSpeed = null; String seleniumRulesDir = null; protected ApplicationContext getDeployedApplicationContext() throws IOException { if (acLoadFailure == null && applicationContext == null) { try { SimpleNamingContextBuilder.emptyActivatedContextBuilder(); } catch (NamingException e) { throw new RuntimeException("", e); } try { log .debug("Initializing test version of deployed application context"); applicationContext = new ClassPathXmlApplicationContext( getConfigLocations()); /* * Resource resources[] = applicationContext.getResources("*"); * for(int i=0;i<resources.length;i++){ * System.out.println("\n "+ * i+": "+resources[i].getDescription()); } * System.out.println("\n Printing classpath:\n" * +getClasspathString()); */ } catch (RuntimeException e) { acLoadFailure = e; throw e; } } else if (acLoadFailure != null) { throw new RuntimeException( "Application context loading already failed. Will not retry. " + "Original cause attached.", acLoadFailure); } return applicationContext; } public String getClasspathString() { StringBuffer classpath = new StringBuffer(); ClassLoader applicationClassLoader = this.getClass().getClassLoader(); if (applicationClassLoader == null) { applicationClassLoader = ClassLoader.getSystemClassLoader(); } if (applicationClassLoader instanceof URLClassLoader) { URL[] urls = ((URLClassLoader) applicationClassLoader).getURLs(); for (int i = 0; i < urls.length; i++) { classpath.append(urls[i].getFile()).append("\r\n"); } return classpath.toString(); } else { return ((AntClassLoader) applicationClassLoader).getClasspath(); } } /** * The sub classes(testclasses) can override the config locations at * runtime. 
* * @return */ public final String[] getConfigLocations() { return new String[] { "classpath*:gov/nih/nci/cabig/caaers/applicationContext-selenium.xml" }; } public void setUp() throws Exception { // super.setUp(); Properties properties = (Properties) getDeployedApplicationContext() .getBean("caaersDatasourceFactoryBean"); seleniumServerURL = properties.getProperty("selenium.url"); seleniumServerPort = properties.getProperty("selenium.port"); seleniumBrowser = properties.getProperty("selenium.browser"); caaersURL = properties.getProperty("selenium.caaersURL"); seleniumSpeed = properties.getProperty("selenium.speed"); seleniumRulesDir = properties.getProperty("selenium.rules.dir"); System.out.println(seleniumServerURL); // setUp("https://oracle.qa.semanticbits.com", "*chrome"); // selenium = new DefaultSelenium("10.10.10.154", 4444, "*chrome", // "https://oracle.qa.semanticbits.com"); selenium = new DefaultSelenium(seleniumServerURL, Integer .parseInt(seleniumServerPort), seleniumBrowser, caaersURL); selenium.start(); aw = new AjaxWidgets(selenium); selenium.setSpeed(seleniumSpeed); } public void log(String message, Exception e) { log.debug(message, e); } public void log(String message) { this.log(message, null); } /* * public void testLogin() throws Exception { aw.login(); * assertTrue("Login Failure", true); assertTrue("Login Failure", * selenium.isTextPresent("Regular Tasks")); } */ public void searchStudy(String studyId) throws InterruptedException { selenium.open("/caaers/pages/task"); selenium.waitForPageToLoad("30000"); selenium.click("firstlevelnav_searchStudyController"); selenium.waitForPageToLoad("30000"); selenium.select("searchCriteria[0].searchType", "label=Identifier"); selenium.type("searchCriteria[0].searchText", studyId); selenium.click("//input[@value='Search']"); aw .waitForElementPresent("//input[@name='ajaxTable_f_primaryIdentifierValue']"); Thread.sleep(4000); } public void checkLogin() throws Exception { selenium.open("/caaers/pages/task"); selenium.waitForPageToLoad("30000"); if (selenium.isTextPresent("Please Log in")) { aw.login(); } } public void waitForCaaersStartup() throws Exception { selenium.open("caaers/public/login"); selenium.waitForPageToLoad("30000"); int maxAttempts = 20; int attempt = 0; System.out.println("Checking for caaers..."); for (attempt = 0; attempt < maxAttempts; attempt++) { if (!(selenium.isTextPresent("Please Log in")) && !(selenium.isTextPresent("Log out"))) { selenium.open("caaers/public/login"); selenium.waitForPageToLoad("30000"); System.out.println("\t attempt no: " + attempt); } else { aw.login(); break; } } if (attempt >= maxAttempts) throw new Exception("Timed out waiting for caaers to start."); } public void editStudy() throws Exception { aw.clickNext("flow-next"); populateEditStudyDetails(); aw.clickNext("flow-next"); aw.clickNext("flow-next"); populateEditStudyAgents(); aw.clickNext("flow-next"); populateEditStudyTreatmentAssignments(); aw.clickNext("flow-next"); populateEditStudyDisease(); aw.clickNext("flow-next"); populateEditStudyEvalPeriod(); aw.clickNext("flow-next"); // populateEditStudySites(); aw.clickNext("flow-next"); populateEditStudyInvestigators(); aw.clickNext("flow-next"); aw.clickNext("flow-next"); populateEditStudyIdentifiers(); aw.clickNext("flow-next"); } public void populateEditStudyIdentifiers() throws InterruptedException { selenium.click("add-organization-section-row-button"); aw.waitForElementPresent("study.identifiersLazy[2].value"); selenium.type("study.identifiersLazy[2].value", "N027D-test1"); 
selenium.select("study.identifiersLazy[2].type", "label=Site Identifier"); selenium .click("//select[@id='study.identifiersLazy[2].type']/option[2]"); aw.typeAutosuggest("study.identifiersLazy[2].organization-input", "mn003", "study.identifiersLazy[2].organization-choices"); selenium.click("//img[@alt='delete']"); selenium.waitForPageToLoad("30000"); } public void populateEditStudyInvestigators() throws InterruptedException { selenium.select("studySiteIndex", "label=University of Alabama at Birmingham (Site)"); Thread.sleep(5000); selenium.click("//img[@alt='delete']"); selenium.waitForPageToLoad("30000"); aw.confirmOK("^Do you really want to delete[\\s\\S]$"); selenium.click("add-ssi-table-row-button"); aw .waitForElementPresent("study.studyOrganizations[2].studyInvestigators[0].siteInvestigator-input"); aw .typeAutosuggest( "study.studyOrganizations[2].studyInvestigators[0].siteInvestigator-input", "john", "study.studyOrganizations[2].studyInvestigators[0].siteInvestigator-choices"); selenium.select( "study.studyOrganizations[2].studyInvestigators[0].roleCode", "label=Site Principal Investigator"); selenium.click("//option[@value='Site Principal Investigator']"); selenium.select( "study.studyOrganizations[2].studyInvestigators[0].statusCode", "label=Active"); selenium.click("//option[@value='Active']"); } public void populateEditStudyEvalPeriod() throws InterruptedException { selenium.click("addSingleTermBtn"); aw.typeAutosuggest("termCode-input", "vasovagal", "termCode-choices"); selenium.click("addSingleTermBtn"); Thread.sleep(4000); selenium.click("DELETE_3"); aw.confirmOK("^Are you sure you want to delete this[\\s\\S]$"); Thread.sleep(1000); } public void populateEditStudySites() throws Exception { aw.addLastPanel("add-ss-section-button", "study.studySites[?].organization-clear"); //selenium.click("add-ss-section-button"); //aw.waitForElementPresent("study.studySites[1].organization-clear"); int index = aw.computeLatestElementIndex("study.studySites[?].organization-clear", true); selenium.click("study.studySites["+index+"].organization-clear"); aw.typeAutosuggest("study.studySites["+index+"].organization-input", "mn003", "study.studySites["+index+"].organization-choices"); selenium.click("//a[@id='del-"+index+"']/img"); selenium.waitForPageToLoad("30000"); aw.confirmOK("^Do you really want to delete[\\s\\S]$"); } public void populateEditStudyDisease() throws InterruptedException { Thread.sleep(4000); selenium.click("diseaseCategoryAsText-clear"); aw.typeAutosuggest("diseaseCategoryAsText-input", "bone", "diseaseCategoryAsText-choices"); Thread.sleep(3000); selenium.removeSelection("disease-sub-category", "label=All"); selenium.addSelection("disease-sub-category", "label=Osteosarcoma"); Thread.sleep(3000); selenium.removeSelection("disease-term", "label=All"); selenium.addSelection("disease-term", "label=Osteosarcoma"); selenium.click("//input[@value='Add disease']"); selenium.waitForPageToLoad("30000"); selenium .click("//div[@id='contentOf-']/center/table/tbody/tr[3]/td[3]/div/a/img"); selenium.waitForPageToLoad("30000"); aw.clickNext("flow-next"); aw.typeAutosuggest("termCode-input", "nausea", "termCode-choices"); selenium.click("addSingleTermBtn"); aw.waitForElementPresent("button-3207"); selenium.click("button-3207"); aw.confirmOK("^Are you sure you want to delete this[\\s\\S]$"); } public void populateEditStudyTreatmentAssignments() throws Exception { aw.addLastPanel("add-si-section-button", "//label[@for='study.treatmentAssignments[?].code']"); /* * 
selenium.click("add-si-section-button"); aw.waitForElementPresent( * "//label[@for='study.treatmentAssignments[1].code']"); */ String study_treatmentAssignments_code = aw.computeLatestElement( "study.treatmentAssignments[?].code", true); String study_treatmentAssignments_description = aw .computeLatestElement( "study.treatmentAssignments[?].description", true); selenium.type(study_treatmentAssignments_code, "TAC-1273812"); selenium.type(study_treatmentAssignments_description, "TAC-1273812 description"); aw.removeLastPanel("//div[@id='si-section-?']/div[1]/h3/div/a/img", "^Are you sure you want to delete this[\\s\\S]$"); /* * selenium.click("//div[@id='si-section-1']/div[1]/h3/div/a/img"); * aw.confirmOK("^Are you sure you want to delete this[\\s\\S]$"); */ Thread.sleep(4000); } public void populateEditStudyAgents() throws Exception { selenium.click("select-agent-0"); aw.addLastPanel("AddStudyAgent", "//div[@id='sa-section-?']"); // selenium.click("select-agent-2"); selenium.click(aw.computeLatestElement("select-agent-?", true)); String study_studyAgents_agent_input = aw.computeLatestElement( "study.studyAgents[?].agent-input", true); String study_studyAgents_agent_choices = aw.computeLatestElement( "study.studyAgents[?].agent-choices", true); /* * aw.typeAutosuggest("study.studyAgents[1].agent-input", "123127", * "study.studyAgents[2].agent-choices"); */ aw.typeAutosuggest(study_studyAgents_agent_input, "123127", study_studyAgents_agent_choices); selenium.select("study.studyAgents[1].indType", "label=CTEP IND"); aw.removeLastPanel("//div[@id='sa-section-?']/div[1]/h3/div/a/img", "^Are you sure you want to delete this[\\s\\S]$"); } public void populateEditStudyDetails() { selenium.type("//textarea[@id='study.precis']", "test precis"); selenium.type("study.studyCoordinatingCenter.organization-input", "NC002"); } public void createCTCStudy(String fundingSponsorIdentifier) throws InterruptedException { studyId = fundingSponsorIdentifier; populateCreateStudyDetails(); aw.clickNext("flow-next"); populateCreateStudyTherapies(); aw.clickNext("flow-next"); populateCreateStudyAgents(); aw.clickNext("flow-next"); populateCreateStudyTreatmentAssignments(); aw.clickNext("flow-next"); populateCreateStudyDiseases(); aw.clickNext("flow-next"); populateCreateStudyEvalPeriods("nausea"); aw.clickNext("flow-next"); populateCreateStudyExpectedAEs("vasculitis"); aw.clickNext("flow-next"); populateCreateStudySites(); aw.clickNext("flow-next"); populateCreateStudyInvestigators(); aw.clickNext("flow-next"); populateCreateStudyPersonnel(); aw.clickNext("flow-next"); populateCreateStudyIdentifiers(); aw.clickNext("flow-next"); aw.clickNext("flow-next"); } public void createMeddraStudy(String fundingSponsorIdentifier) throws InterruptedException { studyId = fundingSponsorIdentifier; populateCreateStudyDetails(); { selenium.select("study.aeTerminology.term", "label=MedDRA"); selenium.click("//option[@value='MEDDRA']"); Thread.sleep(1000); selenium.select("study.aeTerminology.meddraVersion", "label=MedDRA v9"); } aw.clickNext("flow-next"); populateCreateStudyTherapies(); aw.clickNext("flow-next"); populateCreateStudyAgents(); aw.clickNext("flow-next"); populateCreateStudyTreatmentAssignments(); aw.clickNext("flow-next"); populateCreateStudyDiseases(); aw.clickNext("flow-next"); populateCreateStudyEvalPeriods("Nervousness"); aw.clickNext("flow-next"); populateCreateStudyExpectedAEs("Deja-vu"); aw.clickNext("flow-next"); populateCreateStudySites(); aw.clickNext("flow-next"); populateCreateStudyInvestigators(); 
aw.clickNext("flow-next"); populateCreateStudyPersonnel(); aw.clickNext("flow-next"); populateCreateStudyIdentifiers(); aw.clickNext("flow-next"); aw.clickNext("flow-next"); } public void populateCreateStudyIdentifiers() { selenium.click("study.identifiersLazy[0].primaryIndicator"); } public void populateCreateStudyPersonnel() { ; } public void populateCreateStudyInvestigators() throws InterruptedException { selenium.select("studySiteIndex", "label=University of Alabama at Birmingham (Site)"); selenium.waitForPageToLoad("30000"); selenium.click("add-ssi-table-row-button"); aw .waitForElementPresent("study.studyOrganizations[2].studyInvestigators[0].siteInvestigator-input"); aw .typeAutosuggest( "study.studyOrganizations[2].studyInvestigators[0].siteInvestigator-input", "fiveash", "study.studyOrganizations[2].studyInvestigators[0].siteInvestigator-choices"); selenium.select( "study.studyOrganizations[2].studyInvestigators[0].roleCode", "label=Site Investigator"); selenium.click("//option[@value='Site Investigator']"); selenium.select( "study.studyOrganizations[2].studyInvestigators[0].statusCode", "label=Active"); selenium.click("//option[@value='Active']"); } public void populateCreateStudySites() throws InterruptedException { selenium.click("add-ss-section-button"); aw.waitForElementPresent("study.studySites[0].organization-input"); aw.typeAutosuggest("study.studySites[0].organization-input", "AL002", "study.studySites[0].organization-choices"); } public void populateCreateStudyExpectedAEs(String aeTerm) throws InterruptedException { // typeAutosuggest("termCode-input", "vasculitis", "termCode-choices"); aw.typeAutosuggest("termCode-input", aeTerm, "termCode-choices"); selenium.click("addSingleTermBtn"); if (selenium.isElementPresent("addMultiTermBtn")) { selenium.click("addMultiTermBtn"); aw.waitForElementPresent("//div[@id='categories-div-id']"); selenium.removeSelection("categories", "label=AUDITORY/EAR"); selenium.addSelection("categories", "label=CARDIAC GENERAL"); aw.waitForElementPresent("//option[@title='Hypertension']"); selenium.addSelection("terms", "label=Hypertension"); selenium.addSelection("terms", "label=Hypotension"); selenium.click("addTermsBtn"); } } public void populateCreateStudyEvalPeriods(String aeTerm) throws InterruptedException { // typeAutosuggest("termCode-input", "nausea", "termCode-choices"); aw.typeAutosuggest("termCode-input", aeTerm, "termCode-choices"); selenium.click("addSingleTermBtn"); Thread.sleep(5000); selenium.click("ck0"); selenium.click("ck1"); selenium.click("ck2"); } public void populateCreateStudyDiseases() throws InterruptedException { selenium.click("diseaseCategoryAsText-clear"); aw.typeAutosuggest("diseaseCategoryAsText-input", "soft", "diseaseCategoryAsText-choices"); Thread.sleep(3000); selenium.addSelection("disease-sub-category", "label=All"); Thread.sleep(2000); selenium.removeSelection("disease-term", "label=All"); selenium.addSelection("disease-term", "label=Synovial sarcoma"); selenium.click("//input[@value='Add disease']"); selenium.waitForPageToLoad("30000"); } public void populateCreateStudyAgents() throws InterruptedException { selenium.click("AddStudyAgent"); aw.waitForElementPresent("study.studyAgents[0].agent-input"); selenium.click("select-agent-0"); aw.typeAutosuggest("study.studyAgents[0].agent-input", "683864", "study.studyAgents[0].agent-choices"); selenium.select("study.studyAgents[0].indType", "label=CTEP IND"); selenium.select("study.studyAgents[0].partOfLeadIND", "label=Yes"); } public void 
populateCreateStudyTreatmentAssignments() throws InterruptedException { selenium.click("add-si-section-button"); aw.waitForElementPresent("study.treatmentAssignments[0].code"); selenium.type("study.treatmentAssignments[0].code", "A0"); selenium .type( "study.treatmentAssignments[0].description", "Concomitant Treatment Cycle = 6 weeks: CCI-779: 25mg/wk IV over 30 min weekly RT: 200cGy 5 days a wk, starting 7-10 days after first CCI-779 dose TMZ: 75mg/m2 PO QD with RT Adjuvant Treatment Cycle = 28 days (Max = 6 cycles) CCI-779:"); } public void populateCreateStudyTherapies() { selenium.click("study.drugAdministrationTherapyType"); selenium.click("study.radiationTherapyType"); } public void populateCreateStudyDetails() throws InterruptedException { selenium.open("/caaers/pages/task"); selenium.click("firstlevelnav_searchStudyController"); selenium.waitForPageToLoad("30000"); selenium.click("//a[@id='secondlevelnav_createStudyController']/span"); selenium.waitForPageToLoad("30000"); selenium .type( "study.shortTitle", "Phase I Study of CCI-779 and Temozolomide in Combination with Radiation Therapy in Glioblastoma Multiforme"); selenium .type( "study.longTitle", "A Phase I Study of CCI-779 and Temozolomide in Combination with Radiation Therapy in Glioblastoma Multiforme"); selenium .type( "study.description", "A Phase I Study of CCI-779 and Temozolomide in Combination with Radiation Therapy in Glioblastoma Multiformexxxx"); selenium.select("study.phaseCode", "label=Phase I Trial"); selenium.select("study.status", "label=Active - Trial is open to accrual"); selenium.select("study.multiInstitutionIndicator", "label=Yes"); selenium.select("study.adeersReporting", "label=Yes"); selenium.select("study.aeTerminology.ctcVersion", "label=CTCAE v3.0"); selenium.select("study.otherMeddra", "label=MedDRA v9"); selenium.click("study.caaersXMLType"); selenium.click("study.adeersPDFType"); aw.typeAutosuggest("study.studyCoordinatingCenter.organization-input", "ncctg", "study.studyCoordinatingCenter.organization-choices"); selenium.type("study.identifiers[1].value", studyId); aw.typeAutosuggest("study.primaryFundingSponsorOrganization-input", "ctep", "study.primaryFundingSponsorOrganization-choices"); selenium.type("study.identifiers[0].value", studyId); } public void createInvestigator() throws Exception { selenium.open("/caaers/pages/task"); selenium.click("firstlevelnav_configurationController"); selenium.waitForPageToLoad("30000"); selenium .click("//a[@id='secondlevelnav_createInvestigatorController']/span"); selenium.waitForPageToLoad("30000"); selenium.type("firstName", "Jack"); selenium.type("lastName", "Black"); selenium.type("emailAddress", "[email protected]"); selenium.type("phoneNumber", "0000000000"); aw.typeAutosuggest("siteInvestigators[0].organization-input", "nci", "siteInvestigators[0].organization-choices"); selenium.select("siteInvestigators[0].statusCode", "label=Active"); selenium.click("flow-next"); selenium.waitForPageToLoad("30000"); } public void searchInvestigator(String firstName, String lastName) throws Exception { selenium.open("/caaers/pages/task"); selenium.click("firstlevelnav_configurationController"); selenium.waitForPageToLoad("30000"); selenium .click("//a[@id='secondlevelnav_createInvestigatorController']/span"); selenium.waitForPageToLoad("30000"); selenium.click("link=Search Investigator"); selenium.waitForPageToLoad("30000"); selenium.type("firstName", firstName); selenium.type("lastName", lastName); selenium.click("//input[@value='Search']"); 
aw.waitForElementPresent("//td[@title='Sort By First Name']"); } public void uploadRules() throws Exception { selenium.open("/caaers/pages/task"); selenium.waitForPageToLoad("30000"); selenium.click("firstlevelnav_createRuleController"); selenium.waitForPageToLoad("30000"); selenium.click("//a[@id='secondlevelnav_importRuleController']/span"); selenium.waitForPageToLoad("30000"); String files[] = { "\\gov.nih.nci.cabig.caaers.rules.sponsor.cancer_therapy_evaluation_program.mandatory_sections_rules.xml", "\\gov.nih.nci.cabig.caaers.rules.sponsor.cancer_therapy_evaluation_program.sae_reporting_rules.xml", "\\gov.nih.nci.cabig.caaers.rules.sponsor.division_of_cancer_prevention.mandatory_sections_rules.xml", "\\gov.nih.nci.cabig.caaers.rules.sponsor.division_of_cancer_prevention.sae_reporting_rules.xml" }; for (int i = 0; i < files.length; i++) { // String absPath = new File(rulesDir+files[i]).getAbsolutePath(); String absPath = seleniumRulesDir + files[i]; System.out.println("Rule file being imported: " + absPath); log("Uploading rule from: " + absPath); selenium.type("ruleSetFile1", absPath); selenium.click("//input[@value='Import']"); selenium.waitForPageToLoad("30000"); if (!selenium .isElementPresent("//p[contains(text(),'Rules imported successfully')]")) throw new Exception("Error when importing following rule xml: " + absPath); } } // public void editInvestigator(String ){} /* * public void testNew() throws Exception { * * selenium.click("firstlevelnav_listAdverseEventsController"); * selenium.waitForPageToLoad("30000"); selenium.click("study-clear"); * selenium.type("study-input", "n"); selenium.typeKeys("study-input", * "027d"); fail("test"); * selenium.waitForCondition(String.format("selenium.isTextPresent('%s')", * "Temozolomide"), "10000"); * selenium.click("//div[@id='study-choices']/ul/li[1]"); * selenium.click("participant-input"); selenium.click("participant-clear"); * selenium.typeKeys("participant-input", "Jones"); * selenium.waitForCondition(String.format("selenium.isTextPresent('%s')", * "Catherine"), "10000"); * selenium.click("//div[@id='participant-choices']/ul/li[1]"); * aw.clickNext("flow-next"); } */ }
projects/selenium/src/test/java/gov/nih/nci/cabig/caaers/web/selenium/CaaersSeleniumTestCase.java
package gov.nih.nci.cabig.caaers.web.selenium; import java.io.IOException; import java.net.URL; import java.net.URLClassLoader; import java.util.Properties; import javax.naming.NamingException; import org.apache.log4j.Logger; import org.apache.tools.ant.AntClassLoader; import org.springframework.context.ApplicationContext; import org.springframework.context.support.ClassPathXmlApplicationContext; import org.springframework.mock.jndi.SimpleNamingContextBuilder; import com.thoughtworks.selenium.DefaultSelenium; import com.thoughtworks.selenium.SeleneseTestCase; import org.springframework.core.io.*; public class CaaersSeleniumTestCase extends SeleneseTestCase { String studyId = null; AjaxWidgets aw; public final String BLAZING = "0"; public final String FAST = "1000"; public final String MEDIUM = "5000"; public final String SLOW = "10000"; private Logger log = Logger.getLogger(CaaersSeleniumTestCase.class); private RuntimeException acLoadFailure = null; private ApplicationContext applicationContext = null; String seleniumServerURL = null; String seleniumServerPort = null; String seleniumBrowser = null; String caaersURL = null; String seleniumSpeed = null; String seleniumRulesDir = null; protected ApplicationContext getDeployedApplicationContext() throws IOException { if (acLoadFailure == null && applicationContext == null) { try { SimpleNamingContextBuilder.emptyActivatedContextBuilder(); } catch (NamingException e) { throw new RuntimeException("", e); } try { log .debug("Initializing test version of deployed application context"); applicationContext = new ClassPathXmlApplicationContext( getConfigLocations()); /* * Resource resources[] = applicationContext.getResources("*"); * for(int i=0;i<resources.length;i++){ * System.out.println("\n "+ * i+": "+resources[i].getDescription()); } * System.out.println("\n Printing classpath:\n" * +getClasspathString()); */ } catch (RuntimeException e) { acLoadFailure = e; throw e; } } else if (acLoadFailure != null) { throw new RuntimeException( "Application context loading already failed. Will not retry. " + "Original cause attached.", acLoadFailure); } return applicationContext; } public String getClasspathString() { StringBuffer classpath = new StringBuffer(); ClassLoader applicationClassLoader = this.getClass().getClassLoader(); if (applicationClassLoader == null) { applicationClassLoader = ClassLoader.getSystemClassLoader(); } if (applicationClassLoader instanceof URLClassLoader) { URL[] urls = ((URLClassLoader) applicationClassLoader).getURLs(); for (int i = 0; i < urls.length; i++) { classpath.append(urls[i].getFile()).append("\r\n"); } return classpath.toString(); } else { return ((AntClassLoader) applicationClassLoader).getClasspath(); } } /** * The sub classes(testclasses) can override the config locations at * runtime. 
* * @return */ public final String[] getConfigLocations() { return new String[] { "classpath*:gov/nih/nci/cabig/caaers/applicationContext-selenium.xml" }; } public void setUp() throws Exception { // super.setUp(); Properties properties = (Properties) getDeployedApplicationContext() .getBean("caaersDatasourceFactoryBean"); seleniumServerURL = properties.getProperty("selenium.url"); seleniumServerPort = properties.getProperty("selenium.port"); seleniumBrowser = properties.getProperty("selenium.browser"); caaersURL = properties.getProperty("selenium.caaersURL"); seleniumSpeed = properties.getProperty("selenium.speed"); seleniumRulesDir = properties.getProperty("selenium.rules.dir"); System.out.println(seleniumServerURL); // setUp("https://oracle.qa.semanticbits.com", "*chrome"); // selenium = new DefaultSelenium("10.10.10.154", 4444, "*chrome", // "https://oracle.qa.semanticbits.com"); selenium = new DefaultSelenium(seleniumServerURL, Integer .parseInt(seleniumServerPort), seleniumBrowser, caaersURL); selenium.start(); aw = new AjaxWidgets(selenium); selenium.setSpeed(seleniumSpeed); } public void log(String message, Exception e) { log.debug(message, e); } public void log(String message) { this.log(message, null); } /* * public void testLogin() throws Exception { aw.login(); * assertTrue("Login Failure", true); assertTrue("Login Failure", * selenium.isTextPresent("Regular Tasks")); } */ public void searchStudy(String studyId) throws InterruptedException { selenium.open("/caaers/pages/task"); selenium.waitForPageToLoad("30000"); selenium.click("firstlevelnav_searchStudyController"); selenium.waitForPageToLoad("30000"); selenium.select("searchCriteria[0].searchType", "label=Identifier"); selenium.type("searchCriteria[0].searchText", studyId); selenium.click("//input[@value='Search']"); aw .waitForElementPresent("//input[@name='ajaxTable_f_primaryIdentifierValue']"); Thread.sleep(4000); } public void checkLogin() throws Exception { selenium.open("/caaers/pages/task"); selenium.waitForPageToLoad("30000"); if (selenium.isTextPresent("Please Log in")) { aw.login(); } } public void waitForCaaersStartup() throws Exception { selenium.open("caaers/public/login"); selenium.waitForPageToLoad("30000"); int maxAttempts = 20; int attempt = 0; for (attempt = 0; attempt < maxAttempts; attempt++) { if (!(selenium.isTextPresent("Please Log in")) && !(selenium.isTextPresent("Log out"))) { selenium.open("caaers/public/login"); selenium.waitForPageToLoad("30000"); System.out.println("Checking for caaers, attempt no: " + attempt); } else { aw.login(); break; } } if (attempt >= maxAttempts) throw new Exception("Timed out waiting for caaers to start."); } public void editStudy() throws Exception { aw.clickNext("flow-next"); populateEditStudyDetails(); aw.clickNext("flow-next"); aw.clickNext("flow-next"); populateEditStudyAgents(); aw.clickNext("flow-next"); populateEditStudyTreatmentAssignments(); aw.clickNext("flow-next"); populateEditStudyDisease(); aw.clickNext("flow-next"); populateEditStudyEvalPeriod(); aw.clickNext("flow-next"); // populateEditStudySites(); aw.clickNext("flow-next"); populateEditStudyInvestigators(); aw.clickNext("flow-next"); aw.clickNext("flow-next"); populateEditStudyIdentifiers(); aw.clickNext("flow-next"); } public void populateEditStudyIdentifiers() throws InterruptedException { selenium.click("add-organization-section-row-button"); aw.waitForElementPresent("study.identifiersLazy[2].value"); selenium.type("study.identifiersLazy[2].value", "N027D-test1"); 
selenium.select("study.identifiersLazy[2].type", "label=Site Identifier"); selenium .click("//select[@id='study.identifiersLazy[2].type']/option[2]"); aw.typeAutosuggest("study.identifiersLazy[2].organization-input", "mn003", "study.identifiersLazy[2].organization-choices"); selenium.click("//img[@alt='delete']"); selenium.waitForPageToLoad("30000"); } public void populateEditStudyInvestigators() throws InterruptedException { selenium.select("studySiteIndex", "label=University of Alabama at Birmingham (Site)"); Thread.sleep(5000); selenium.click("//img[@alt='delete']"); selenium.waitForPageToLoad("30000"); aw.confirmOK("^Do you really want to delete[\\s\\S]$"); selenium.click("add-ssi-table-row-button"); aw .waitForElementPresent("study.studyOrganizations[2].studyInvestigators[0].siteInvestigator-input"); aw .typeAutosuggest( "study.studyOrganizations[2].studyInvestigators[0].siteInvestigator-input", "john", "study.studyOrganizations[2].studyInvestigators[0].siteInvestigator-choices"); selenium.select( "study.studyOrganizations[2].studyInvestigators[0].roleCode", "label=Site Principal Investigator"); selenium.click("//option[@value='Site Principal Investigator']"); selenium.select( "study.studyOrganizations[2].studyInvestigators[0].statusCode", "label=Active"); selenium.click("//option[@value='Active']"); } public void populateEditStudyEvalPeriod() throws InterruptedException { selenium.click("addSingleTermBtn"); aw.typeAutosuggest("termCode-input", "vasovagal", "termCode-choices"); selenium.click("addSingleTermBtn"); Thread.sleep(4000); selenium.click("DELETE_3"); aw.confirmOK("^Are you sure you want to delete this[\\s\\S]$"); Thread.sleep(1000); } public void populateEditStudySites() throws Exception { aw.addLastPanel("add-ss-section-button", "study.studySites[?].organization-clear"); //selenium.click("add-ss-section-button"); //aw.waitForElementPresent("study.studySites[1].organization-clear"); int index = aw.computeLatestElementIndex("study.studySites[?].organization-clear", true); selenium.click("study.studySites["+index+"].organization-clear"); aw.typeAutosuggest("study.studySites["+index+"].organization-input", "mn003", "study.studySites["+index+"].organization-choices"); selenium.click("//a[@id='del-"+index+"']/img"); selenium.waitForPageToLoad("30000"); aw.confirmOK("^Do you really want to delete[\\s\\S]$"); } public void populateEditStudyDisease() throws InterruptedException { Thread.sleep(4000); selenium.click("diseaseCategoryAsText-clear"); aw.typeAutosuggest("diseaseCategoryAsText-input", "bone", "diseaseCategoryAsText-choices"); Thread.sleep(3000); selenium.removeSelection("disease-sub-category", "label=All"); selenium.addSelection("disease-sub-category", "label=Osteosarcoma"); Thread.sleep(3000); selenium.removeSelection("disease-term", "label=All"); selenium.addSelection("disease-term", "label=Osteosarcoma"); selenium.click("//input[@value='Add disease']"); selenium.waitForPageToLoad("30000"); selenium .click("//div[@id='contentOf-']/center/table/tbody/tr[3]/td[3]/div/a/img"); selenium.waitForPageToLoad("30000"); aw.clickNext("flow-next"); aw.typeAutosuggest("termCode-input", "nausea", "termCode-choices"); selenium.click("addSingleTermBtn"); aw.waitForElementPresent("button-3207"); selenium.click("button-3207"); aw.confirmOK("^Are you sure you want to delete this[\\s\\S]$"); } public void populateEditStudyTreatmentAssignments() throws Exception { aw.addLastPanel("add-si-section-button", "//label[@for='study.treatmentAssignments[?].code']"); /* * 
selenium.click("add-si-section-button"); aw.waitForElementPresent( * "//label[@for='study.treatmentAssignments[1].code']"); */ String study_treatmentAssignments_code = aw.computeLatestElement( "study.treatmentAssignments[?].code", true); String study_treatmentAssignments_description = aw .computeLatestElement( "study.treatmentAssignments[?].description", true); selenium.type(study_treatmentAssignments_code, "TAC-1273812"); selenium.type(study_treatmentAssignments_description, "TAC-1273812 description"); aw.removeLastPanel("//div[@id='si-section-?']/div[1]/h3/div/a/img", "^Are you sure you want to delete this[\\s\\S]$"); /* * selenium.click("//div[@id='si-section-1']/div[1]/h3/div/a/img"); * aw.confirmOK("^Are you sure you want to delete this[\\s\\S]$"); */ Thread.sleep(4000); } public void populateEditStudyAgents() throws Exception { selenium.click("select-agent-0"); aw.addLastPanel("AddStudyAgent", "//div[@id='sa-section-?']"); // selenium.click("select-agent-2"); selenium.click(aw.computeLatestElement("select-agent-?", true)); String study_studyAgents_agent_input = aw.computeLatestElement( "study.studyAgents[?].agent-input", true); String study_studyAgents_agent_choices = aw.computeLatestElement( "study.studyAgents[?].agent-choices", true); /* * aw.typeAutosuggest("study.studyAgents[1].agent-input", "123127", * "study.studyAgents[2].agent-choices"); */ aw.typeAutosuggest(study_studyAgents_agent_input, "123127", study_studyAgents_agent_choices); selenium.select("study.studyAgents[1].indType", "label=CTEP IND"); aw.removeLastPanel("//div[@id='sa-section-?']/div[1]/h3/div/a/img", "^Are you sure you want to delete this[\\s\\S]$"); } public void populateEditStudyDetails() { selenium.type("//textarea[@id='study.precis']", "test precis"); selenium.type("study.studyCoordinatingCenter.organization-input", "NC002"); } public void createCTCStudy(String fundingSponsorIdentifier) throws InterruptedException { studyId = fundingSponsorIdentifier; populateCreateStudyDetails(); aw.clickNext("flow-next"); populateCreateStudyTherapies(); aw.clickNext("flow-next"); populateCreateStudyAgents(); aw.clickNext("flow-next"); populateCreateStudyTreatmentAssignments(); aw.clickNext("flow-next"); populateCreateStudyDiseases(); aw.clickNext("flow-next"); populateCreateStudyEvalPeriods("nausea"); aw.clickNext("flow-next"); populateCreateStudyExpectedAEs("vasculitis"); aw.clickNext("flow-next"); populateCreateStudySites(); aw.clickNext("flow-next"); populateCreateStudyInvestigators(); aw.clickNext("flow-next"); populateCreateStudyPersonnel(); aw.clickNext("flow-next"); populateCreateStudyIdentifiers(); aw.clickNext("flow-next"); aw.clickNext("flow-next"); } public void createMeddraStudy(String fundingSponsorIdentifier) throws InterruptedException { studyId = fundingSponsorIdentifier; populateCreateStudyDetails(); { selenium.select("study.aeTerminology.term", "label=MedDRA"); selenium.click("//option[@value='MEDDRA']"); Thread.sleep(1000); selenium.select("study.aeTerminology.meddraVersion", "label=MedDRA v9"); } aw.clickNext("flow-next"); populateCreateStudyTherapies(); aw.clickNext("flow-next"); populateCreateStudyAgents(); aw.clickNext("flow-next"); populateCreateStudyTreatmentAssignments(); aw.clickNext("flow-next"); populateCreateStudyDiseases(); aw.clickNext("flow-next"); populateCreateStudyEvalPeriods("Nervousness"); aw.clickNext("flow-next"); populateCreateStudyExpectedAEs("Deja-vu"); aw.clickNext("flow-next"); populateCreateStudySites(); aw.clickNext("flow-next"); populateCreateStudyInvestigators(); 
aw.clickNext("flow-next"); populateCreateStudyPersonnel(); aw.clickNext("flow-next"); populateCreateStudyIdentifiers(); aw.clickNext("flow-next"); aw.clickNext("flow-next"); } public void populateCreateStudyIdentifiers() { selenium.click("study.identifiersLazy[0].primaryIndicator"); } public void populateCreateStudyPersonnel() { ; } public void populateCreateStudyInvestigators() throws InterruptedException { selenium.select("studySiteIndex", "label=University of Alabama at Birmingham (Site)"); selenium.waitForPageToLoad("30000"); selenium.click("add-ssi-table-row-button"); aw .waitForElementPresent("study.studyOrganizations[2].studyInvestigators[0].siteInvestigator-input"); aw .typeAutosuggest( "study.studyOrganizations[2].studyInvestigators[0].siteInvestigator-input", "fiveash", "study.studyOrganizations[2].studyInvestigators[0].siteInvestigator-choices"); selenium.select( "study.studyOrganizations[2].studyInvestigators[0].roleCode", "label=Site Investigator"); selenium.click("//option[@value='Site Investigator']"); selenium.select( "study.studyOrganizations[2].studyInvestigators[0].statusCode", "label=Active"); selenium.click("//option[@value='Active']"); } public void populateCreateStudySites() throws InterruptedException { selenium.click("add-ss-section-button"); aw.waitForElementPresent("study.studySites[0].organization-input"); aw.typeAutosuggest("study.studySites[0].organization-input", "AL002", "study.studySites[0].organization-choices"); } public void populateCreateStudyExpectedAEs(String aeTerm) throws InterruptedException { // typeAutosuggest("termCode-input", "vasculitis", "termCode-choices"); aw.typeAutosuggest("termCode-input", aeTerm, "termCode-choices"); selenium.click("addSingleTermBtn"); if (selenium.isElementPresent("addMultiTermBtn")) { selenium.click("addMultiTermBtn"); aw.waitForElementPresent("//div[@id='categories-div-id']"); selenium.removeSelection("categories", "label=AUDITORY/EAR"); selenium.addSelection("categories", "label=CARDIAC GENERAL"); aw.waitForElementPresent("//option[@title='Hypertension']"); selenium.addSelection("terms", "label=Hypertension"); selenium.addSelection("terms", "label=Hypotension"); selenium.click("addTermsBtn"); } } public void populateCreateStudyEvalPeriods(String aeTerm) throws InterruptedException { // typeAutosuggest("termCode-input", "nausea", "termCode-choices"); aw.typeAutosuggest("termCode-input", aeTerm, "termCode-choices"); selenium.click("addSingleTermBtn"); Thread.sleep(5000); selenium.click("ck0"); selenium.click("ck1"); selenium.click("ck2"); } public void populateCreateStudyDiseases() throws InterruptedException { selenium.click("diseaseCategoryAsText-clear"); aw.typeAutosuggest("diseaseCategoryAsText-input", "soft", "diseaseCategoryAsText-choices"); Thread.sleep(3000); selenium.addSelection("disease-sub-category", "label=All"); Thread.sleep(2000); selenium.removeSelection("disease-term", "label=All"); selenium.addSelection("disease-term", "label=Synovial sarcoma"); selenium.click("//input[@value='Add disease']"); selenium.waitForPageToLoad("30000"); } public void populateCreateStudyAgents() throws InterruptedException { selenium.click("AddStudyAgent"); aw.waitForElementPresent("study.studyAgents[0].agent-input"); selenium.click("select-agent-0"); aw.typeAutosuggest("study.studyAgents[0].agent-input", "683864", "study.studyAgents[0].agent-choices"); selenium.select("study.studyAgents[0].indType", "label=CTEP IND"); selenium.select("study.studyAgents[0].partOfLeadIND", "label=Yes"); } public void 
populateCreateStudyTreatmentAssignments() throws InterruptedException { selenium.click("add-si-section-button"); aw.waitForElementPresent("study.treatmentAssignments[0].code"); selenium.type("study.treatmentAssignments[0].code", "A0"); selenium .type( "study.treatmentAssignments[0].description", "Concomitant Treatment Cycle = 6 weeks: CCI-779: 25mg/wk IV over 30 min weekly RT: 200cGy 5 days a wk, starting 7-10 days after first CCI-779 dose TMZ: 75mg/m2 PO QD with RT Adjuvant Treatment Cycle = 28 days (Max = 6 cycles) CCI-779:"); } public void populateCreateStudyTherapies() { selenium.click("study.drugAdministrationTherapyType"); selenium.click("study.radiationTherapyType"); } public void populateCreateStudyDetails() throws InterruptedException { selenium.open("/caaers/pages/task"); selenium.click("firstlevelnav_searchStudyController"); selenium.waitForPageToLoad("30000"); selenium.click("//a[@id='secondlevelnav_createStudyController']/span"); selenium.waitForPageToLoad("30000"); selenium .type( "study.shortTitle", "Phase I Study of CCI-779 and Temozolomide in Combination with Radiation Therapy in Glioblastoma Multiforme"); selenium .type( "study.longTitle", "A Phase I Study of CCI-779 and Temozolomide in Combination with Radiation Therapy in Glioblastoma Multiforme"); selenium .type( "study.description", "A Phase I Study of CCI-779 and Temozolomide in Combination with Radiation Therapy in Glioblastoma Multiformexxxx"); selenium.select("study.phaseCode", "label=Phase I Trial"); selenium.select("study.status", "label=Active - Trial is open to accrual"); selenium.select("study.multiInstitutionIndicator", "label=Yes"); selenium.select("study.adeersReporting", "label=Yes"); selenium.select("study.aeTerminology.ctcVersion", "label=CTCAE v3.0"); selenium.select("study.otherMeddra", "label=MedDRA v9"); selenium.click("study.caaersXMLType"); selenium.click("study.adeersPDFType"); aw.typeAutosuggest("study.studyCoordinatingCenter.organization-input", "ncctg", "study.studyCoordinatingCenter.organization-choices"); selenium.type("study.identifiers[1].value", studyId); aw.typeAutosuggest("study.primaryFundingSponsorOrganization-input", "ctep", "study.primaryFundingSponsorOrganization-choices"); selenium.type("study.identifiers[0].value", studyId); } public void createInvestigator() throws Exception { selenium.open("/caaers/pages/task"); selenium.click("firstlevelnav_configurationController"); selenium.waitForPageToLoad("30000"); selenium .click("//a[@id='secondlevelnav_createInvestigatorController']/span"); selenium.waitForPageToLoad("30000"); selenium.type("firstName", "Jack"); selenium.type("lastName", "Black"); selenium.type("emailAddress", "[email protected]"); selenium.type("phoneNumber", "0000000000"); aw.typeAutosuggest("siteInvestigators[0].organization-input", "nci", "siteInvestigators[0].organization-choices"); selenium.select("siteInvestigators[0].statusCode", "label=Active"); selenium.click("flow-next"); selenium.waitForPageToLoad("30000"); } public void searchInvestigator(String firstName, String lastName) throws Exception { selenium.open("/caaers/pages/task"); selenium.click("firstlevelnav_configurationController"); selenium.waitForPageToLoad("30000"); selenium .click("//a[@id='secondlevelnav_createInvestigatorController']/span"); selenium.waitForPageToLoad("30000"); selenium.click("link=Search Investigator"); selenium.waitForPageToLoad("30000"); selenium.type("firstName", firstName); selenium.type("lastName", lastName); selenium.click("//input[@value='Search']"); 
aw.waitForElementPresent("//td[@title='Sort By First Name']"); } public void uploadRules() throws Exception { selenium.open("/caaers/pages/task"); selenium.waitForPageToLoad("30000"); selenium.click("firstlevelnav_createRuleController"); selenium.waitForPageToLoad("30000"); selenium.click("//a[@id='secondlevelnav_importRuleController']/span"); selenium.waitForPageToLoad("30000"); String files[] = { "\\gov.nih.nci.cabig.caaers.rules.sponsor.cancer_therapy_evaluation_program.mandatory_sections_rules.xml", "\\gov.nih.nci.cabig.caaers.rules.sponsor.cancer_therapy_evaluation_program.sae_reporting_rules.xml", "\\gov.nih.nci.cabig.caaers.rules.sponsor.division_of_cancer_prevention.mandatory_sections_rules.xml", "\\gov.nih.nci.cabig.caaers.rules.sponsor.division_of_cancer_prevention.sae_reporting_rules.xml" }; for (int i = 0; i < files.length; i++) { // String absPath = new File(rulesDir+files[i]).getAbsolutePath(); String absPath = seleniumRulesDir + files[i]; System.out.println("Rule file being imported: " + absPath); log("Uploading rule from: " + absPath); selenium.type("ruleSetFile1", absPath); selenium.click("//input[@value='Import']"); selenium.waitForPageToLoad("30000"); if (!selenium .isElementPresent("//p[contains(text(),'Rules imported successfully')]")) throw new Exception("Error when importing following rule xml: " + absPath); } } // public void editInvestigator(String ){} /* * public void testNew() throws Exception { * * selenium.click("firstlevelnav_listAdverseEventsController"); * selenium.waitForPageToLoad("30000"); selenium.click("study-clear"); * selenium.type("study-input", "n"); selenium.typeKeys("study-input", * "027d"); fail("test"); * selenium.waitForCondition(String.format("selenium.isTextPresent('%s')", * "Temozolomide"), "10000"); * selenium.click("//div[@id='study-choices']/ul/li[1]"); * selenium.click("participant-input"); selenium.click("participant-clear"); * selenium.typeKeys("participant-input", "Jones"); * selenium.waitForCondition(String.format("selenium.isTextPresent('%s')", * "Catherine"), "10000"); * selenium.click("//div[@id='participant-choices']/ul/li[1]"); * aw.clickNext("flow-next"); } */ }
latest selenium updates

SVN-Revision: 8099
projects/selenium/src/test/java/gov/nih/nci/cabig/caaers/web/selenium/CaaersSeleniumTestCase.java
latest selenium updates
<ide><path>rojects/selenium/src/test/java/gov/nih/nci/cabig/caaers/web/selenium/CaaersSeleniumTestCase.java <ide> selenium.waitForPageToLoad("30000"); <ide> int maxAttempts = 20; <ide> int attempt = 0; <add> System.out.println("Checking for caaers..."); <ide> for (attempt = 0; attempt < maxAttempts; attempt++) { <ide> if (!(selenium.isTextPresent("Please Log in")) <ide> && !(selenium.isTextPresent("Log out"))) { <ide> selenium.open("caaers/public/login"); <ide> selenium.waitForPageToLoad("30000"); <del> System.out.println("Checking for caaers, attempt no: " <add> System.out.println("\t attempt no: " <ide> + attempt); <ide> <ide> } else {
Java
apache-2.0
008c248ca5d6e8e682b02fc944483881ee597fd8
0
HubSpot/Baragon,HubSpot/Baragon,HubSpot/Baragon
BaragonAgentService/src/main/java/com/hubspot/baragon/agent/handlebars/ToLowercaseHelper.java
package com.hubspot.baragon.agent.handlebars;

import java.net.UnknownHostException;

import com.github.jknack.handlebars.Helper;
import com.github.jknack.handlebars.Options;
import com.google.common.base.Strings;

public class ToLowercaseHelper implements Helper<String> {

  public static final String NAME = "toLowercase";

  @Override
  public CharSequence apply(String input, Options options) throws UnknownHostException {
    if (!Strings.isNullOrEmpty(input)) {
      return input.toLowerCase();
    } else {
      return input;
    }
  }
}
remove tolower helper
BaragonAgentService/src/main/java/com/hubspot/baragon/agent/handlebars/ToLowercaseHelper.java
remove tolower helper
<ide><path>aragonAgentService/src/main/java/com/hubspot/baragon/agent/handlebars/ToLowercaseHelper.java <del>package com.hubspot.baragon.agent.handlebars; <del> <del>import java.net.UnknownHostException; <del> <del>import com.github.jknack.handlebars.Helper; <del>import com.github.jknack.handlebars.Options; <del>import com.google.common.base.Strings; <del> <del>public class ToLowercaseHelper implements Helper<String> { <del> <del> public static final String NAME = "toLowercase"; <del> <del> @Override <del> public CharSequence apply(String input, Options options) throws UnknownHostException { <del> if (!Strings.isNullOrEmpty(input)) { <del> return input.toLowerCase(); <del> } else { <del> return input; <del> } <del> } <del>}
Java
apache-2.0
f3e4f0ae503fb46318bc1661c896f3d5867c4957
0
google/copybara,google/copybara,google/copybara
/* * Copyright (C) 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.copybara.git; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.copybara.GeneralOptions.FORCE; import static com.google.copybara.LazyResourceLoader.memoized; import static com.google.copybara.exception.ValidationException.checkCondition; import static java.lang.String.format; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.MoreObjects; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSetMultimap; import com.google.common.flogger.FluentLogger; import com.google.copybara.Change; import com.google.copybara.ChangeMessage; import com.google.copybara.Destination; import com.google.copybara.DestinationEffect; import com.google.copybara.DestinationStatusVisitor; import com.google.copybara.Endpoint; import com.google.copybara.GeneralOptions; import com.google.copybara.LabelFinder; import com.google.copybara.LazyResourceLoader; import com.google.copybara.Revision; import com.google.copybara.TransformResult; import com.google.copybara.WriterContext; import com.google.copybara.exception.CannotResolveRevisionException; import com.google.copybara.exception.ChangeRejectedException; import com.google.copybara.exception.RepoException; import com.google.copybara.exception.ValidationException; import com.google.copybara.git.GitDestination.WriterImpl.WriteHook; import com.google.copybara.profiler.Profiler.ProfilerTask; import com.google.copybara.util.DiffUtil; import com.google.copybara.util.Glob; import com.google.copybara.util.console.Console; import com.google.devtools.build.lib.syntax.SkylarkList; import java.io.IOException; import java.nio.file.PathMatcher; import java.nio.file.Paths; import java.util.List; import java.util.Objects; import javax.annotation.Nullable; /** * A Git repository destination. 
*/ public final class GitDestination implements Destination<GitRevision> { private static final String ORIGIN_LABEL_SEPARATOR = ": "; static class MessageInfo { final ImmutableList<LabelFinder> labelsToAdd; MessageInfo(ImmutableList<LabelFinder> labelsToAdd) { this.labelsToAdd = checkNotNull(labelsToAdd); } } private static final FluentLogger logger = FluentLogger.forEnclosingClass(); private final String repoUrl; private final String fetch; private final String push; private final GitDestinationOptions destinationOptions; private final GitOptions gitOptions; private final GeneralOptions generalOptions; private final Iterable<GitIntegrateChanges> integrates; private final WriteHook writerHook; private final LazyResourceLoader<GitRepository> localRepo; GitDestination( String repoUrl, String fetch, String push, GitDestinationOptions destinationOptions, GitOptions gitOptions, GeneralOptions generalOptions, WriteHook writerHook, Iterable<GitIntegrateChanges> integrates) { this.repoUrl = checkNotNull(repoUrl); this.fetch = checkNotNull(fetch); this.push = checkNotNull(push); this.destinationOptions = checkNotNull(destinationOptions); this.gitOptions = Preconditions.checkNotNull(gitOptions); this.generalOptions = Preconditions.checkNotNull(generalOptions); this.integrates = Preconditions.checkNotNull(integrates); this.writerHook = checkNotNull(writerHook); this.localRepo = memoized(ignored -> destinationOptions.localGitRepo(repoUrl)); } /** * Throws an exception if the user.email or user.name Git configuration settings are not set. This * helps ensure that the committer field of generated commits is correct. */ private static void verifyUserInfoConfigured(GitRepository repo) throws RepoException, ValidationException { String output = repo.simpleCommand("config", "-l").getStdout(); boolean nameConfigured = false; boolean emailConfigured = false; for (String line : output.split("\n")) { if (line.startsWith("user.name=")) { nameConfigured = true; } else if (line.startsWith("user.email=")) { emailConfigured = true; } } checkCondition(nameConfigured && emailConfigured, "'user.name' and/or 'user.email' are not configured. Please run " + "`git config --global SETTING VALUE` to set them"); } @Override public Writer<GitRevision> newWriter(WriterContext writerContext) { WriterState state = new WriterState( localRepo, destinationOptions.getLocalBranch(push, writerContext.isDryRun())); return new WriterImpl<>( writerContext.isDryRun(), repoUrl, fetch, push, generalOptions, writerHook, state, destinationOptions.nonFastForwardPush, integrates, destinationOptions.lastRevFirstParent, destinationOptions.ignoreIntegrationErrors, destinationOptions.localRepoPath, destinationOptions.committerName, destinationOptions.committerEmail, destinationOptions.rebaseWhenBaseline(), gitOptions.visitChangePageSize); } /** * State to be maintained between writer instances. */ static class WriterState { boolean alreadyFetched; boolean firstWrite = true; final LazyResourceLoader<GitRepository> localRepo; final String localBranch; WriterState(LazyResourceLoader<GitRepository> localRepo, String localBranch) { this.localRepo = localRepo; this.localBranch = localBranch; } } /** * A writer for git.*destination destinations. Note that this is not a public interface and * shouldn't be used directly. 
*/ public static class WriterImpl<S extends WriterState> implements Writer<GitRevision> { final boolean skipPush; private final String repoUrl; private final String remoteFetch; private final String remotePush; private final boolean force; // Only use this console when you don't receive one as a parameter. private final Console baseConsole; private final GeneralOptions generalOptions; private final WriteHook writeHook; final S state; // We could get it from destinationOptions but this is in preparation of a GH PR destination. private final boolean nonFastForwardPush; private final Iterable<GitIntegrateChanges> integrates; private final boolean lastRevFirstParent; private final boolean ignoreIntegrationErrors; private final String localRepoPath; private final String committerName; private final String committerEmail; private final boolean rebase; private final int visitChangePageSize; /** * Create a new git.destination writer */ WriterImpl(boolean skipPush, String repoUrl, String remoteFetch, String remotePush, GeneralOptions generalOptions, WriteHook writeHook, S state, boolean nonFastForwardPush, Iterable<GitIntegrateChanges> integrates, boolean lastRevFirstParent, boolean ignoreIntegrationErrors, String localRepoPath, String committerName, String committerEmail, boolean rebase, int visitChangePageSize) { this.skipPush = skipPush; this.repoUrl = checkNotNull(repoUrl); this.remoteFetch = checkNotNull(remoteFetch); this.remotePush = checkNotNull(remotePush); this.force = generalOptions.isForced(); this.baseConsole = checkNotNull(generalOptions.console()); this.generalOptions = generalOptions; this.writeHook = checkNotNull(writeHook); this.state = checkNotNull(state); this.nonFastForwardPush = nonFastForwardPush; this.integrates = Preconditions.checkNotNull(integrates); this.lastRevFirstParent = lastRevFirstParent; this.ignoreIntegrationErrors = ignoreIntegrationErrors; this.localRepoPath = localRepoPath; this.committerName = committerName; this.committerEmail = committerEmail; this.rebase = rebase; this.visitChangePageSize = visitChangePageSize; } @Override public void visitChanges(@Nullable GitRevision start, ChangesVisitor visitor) throws RepoException, ValidationException { GitRepository repository = getRepository(baseConsole); try { fetchIfNeeded(repository, baseConsole); } catch (ValidationException e) { throw new CannotResolveRevisionException( "Cannot visit changes because fetch failed. Does the destination branch exist?", e); } GitRevision startRef = getLocalBranchRevision(repository); if (startRef == null) { return; } ChangeReader.Builder queryChanges = ChangeReader.Builder.forDestination(repository, baseConsole) .setVerbose(generalOptions.isVerbose()); GitVisitorUtil.visitChanges( start == null ? startRef : start, visitor, queryChanges, generalOptions, "destination", visitChangePageSize); } /** * Do a fetch iff we haven't done one already. Prevents doing unnecessary fetches. 
*/ private void fetchIfNeeded(GitRepository repo, Console console) throws RepoException, ValidationException { if (!state.alreadyFetched) { GitRevision revision = fetchFromRemote(console, repo, repoUrl, remoteFetch); if (revision != null) { repo.simpleCommand("branch", state.localBranch, revision.getSha1()); } state.alreadyFetched = true; } } @Nullable @Override public DestinationStatus getDestinationStatus(Glob destinationFiles, String labelName) throws RepoException, ValidationException { GitRepository repo = getRepository(baseConsole); try { fetchIfNeeded(repo, baseConsole); } catch (ValidationException e) { return null; } GitRevision startRef = getLocalBranchRevision(repo); if (startRef == null) { return null; } PathMatcher pathMatcher = destinationFiles.relativeTo(Paths.get("")); DestinationStatusVisitor visitor = new DestinationStatusVisitor(pathMatcher, labelName); ChangeReader.Builder changeReader = ChangeReader.Builder.forDestination(repo, baseConsole) .setVerbose(generalOptions.isVerbose()) .setFirstParent(lastRevFirstParent) .grep("^" + labelName + ORIGIN_LABEL_SEPARATOR); try { // Using same visitChangePageSize for now GitVisitorUtil.visitChanges( startRef, visitor, changeReader, generalOptions, "get_destination_status", visitChangePageSize); } catch (CannotResolveRevisionException e) { // TODO: handle return null; } return visitor.getDestinationStatus(); } @Override public Endpoint getFeedbackEndPoint(Console console) throws ValidationException { return writeHook.getFeedbackEndPoint(console); } @Nullable private GitRevision getLocalBranchRevision(GitRepository gitRepository) throws RepoException { try { return gitRepository.resolveReference(state.localBranch); } catch (CannotResolveRevisionException e) { if (force) { return null; } throw new RepoException(String.format("Could not find %s in %s and '%s' was not used", remoteFetch, repoUrl, GeneralOptions.FORCE)); } } @Override public boolean supportsHistory() { return true; } /** * A write hook allows us to customize the behavior or git.destination writer for other * implementations. */ public interface WriteHook { /** Customize the writer for a particular destination. */ MessageInfo generateMessageInfo(TransformResult transformResult) throws ValidationException, RepoException; /** * Validate or do modifications to the current change to be pushed. * * <p>{@code HEAD} commit should point to the commit to be pushed. Any change on the local * git repo should keep current commit as HEAD or do the proper modifications to make HEAD to * point to a new/modified changes(s). */ default void beforePush(GitRepository repo, MessageInfo messageInfo, boolean skipPush, List<? extends Change<?>> originChanges) throws RepoException, ValidationException { } /** * Construct the reference to push based on the pushToRefsFor reference. Implementations of * this method can change the reference to a different reference. */ String getPushReference(String pushToRefsFor, TransformResult transformResult); /** * Process the server response from the push command and compute the effects that happened */ ImmutableList<DestinationEffect> afterPush(String serverResponse, MessageInfo messageInfo, GitRevision pushedRevision, List<? 
extends Change<?>> originChanges); default Endpoint getFeedbackEndPoint(Console console) throws ValidationException { return Endpoint.NOOP_ENDPOINT; } default ImmutableSetMultimap<String, String> describe() { return ImmutableSetMultimap.of(); } } /** * A Write hook for standard git repositories */ public static class DefaultWriteHook implements WriteHook { @Override public MessageInfo generateMessageInfo(TransformResult transformResult) { Revision rev = transformResult.getCurrentRevision(); return new MessageInfo( transformResult.isSetRevId() ? ImmutableList.of(new LabelFinder( transformResult.getRevIdLabel() + ORIGIN_LABEL_SEPARATOR + rev.asString())) : ImmutableList.of()); } @Override public ImmutableList<DestinationEffect> afterPush(String serverResponse, MessageInfo messageInfo, GitRevision pushedRevision, List<? extends Change<?>> originChanges) { return ImmutableList.of( new DestinationEffect( DestinationEffect.Type.CREATED, String.format("Created revision %s", pushedRevision.getSha1()), originChanges, new DestinationEffect.DestinationRef( pushedRevision.getSha1(), "commit", /*url=*/ null))); } @Override public String getPushReference(String pushToRefsFor, TransformResult transformResult) { return pushToRefsFor; } } @Override public ImmutableList<DestinationEffect> write(TransformResult transformResult, Glob destinationFiles, Console console) throws ValidationException, RepoException, IOException { logger.atInfo().log( "Exporting from %s to: url=%s ref=%s", transformResult.getPath(), repoUrl, remotePush); String baseline = transformResult.getBaseline(); GitRepository scratchClone = getRepository(console); fetchIfNeeded(scratchClone, console); console.progress("Git Destination: Checking out " + remoteFetch); GitRevision localBranchRevision = getLocalBranchRevision(scratchClone); updateLocalBranchToBaseline(scratchClone, baseline); if (state.firstWrite) { String reference = baseline != null ? baseline : state.localBranch; configForPush(getRepository(console), repoUrl, remotePush); if (!force && localBranchRevision == null) { throw new RepoException(String.format( "Cannot checkout '%s' from '%s'. Use '%s' if the destination is a new git repo or" + " you don't care about the destination current status", reference, repoUrl, GeneralOptions.FORCE)); } if (localBranchRevision != null) { scratchClone.simpleCommand("checkout", "-f", "-q", reference); } else { // Configure the commit to go to local branch instead of master. scratchClone.simpleCommand("symbolic-ref", "HEAD", getCompleteRef(state.localBranch)); } state.firstWrite = false; } else if (!skipPush) { // Should be a no-op, but an iterative migration could take several minutes between // migrations so lets fetch the latest first. 
fetchFromRemote(console, scratchClone, repoUrl, remoteFetch); } PathMatcher pathMatcher = destinationFiles.relativeTo(scratchClone.getWorkTree()); // Get the submodules before we stage them for deletion with // repo.simpleCommand(add --all) AddExcludedFilesToIndex excludedAdder = new AddExcludedFilesToIndex(scratchClone, pathMatcher); excludedAdder.findSubmodules(console); GitRepository alternate = scratchClone.withWorkTree(transformResult.getPath()); console.progress("Git Destination: Adding all files"); try (ProfilerTask ignored = generalOptions.profiler().start("add_files")) { alternate.add().force().all().run(); } console.progress("Git Destination: Excluding files"); try (ProfilerTask ignored = generalOptions.profiler().start("exclude_files")) { excludedAdder.add(); } console.progress("Git Destination: Creating a local commit"); MessageInfo messageInfo = writeHook.generateMessageInfo(transformResult); ChangeMessage msg = ChangeMessage.parseMessage(transformResult.getSummary()); for (LabelFinder label : messageInfo.labelsToAdd) { msg = msg.withNewOrReplacedLabel(label.getName(), label.getSeparator(), label.getValue()); } String commitMessage = msg.toString(); alternate.commit( transformResult.getAuthor().toString(), transformResult.getTimestamp(), commitMessage); ValidationException.checkCondition(!transformResult.getSummary().trim().isEmpty(), "Change description is empty."); for (GitIntegrateChanges integrate : integrates) { integrate.run(alternate, generalOptions, messageInfo, path -> !pathMatcher.matches(scratchClone.getWorkTree().resolve(path)), transformResult, ignoreIntegrationErrors); } // Don't leave unstaged/untracked files in the work-tree. This is a problem for rebase // and in general any inspection of the directory after Copybara execution. // Clean unstaged: scratchClone.simpleCommand("reset", "--hard"); // ...and untracked ones: scratchClone.forceClean(); if (baseline != null && rebase) { // Note that it is a different work-tree from the previous reset alternate.simpleCommand("reset", "--hard"); alternate.rebase(localBranchRevision.getSha1()); } if (localRepoPath != null) { scratchClone.simpleCommand("checkout", state.localBranch); } if (transformResult.isAskForConfirmation()) { // The git repo contains the staged changes at this point. Git diff writes to Stdout console.info(DiffUtil.colorize( console, scratchClone.simpleCommand("show", "HEAD").getStdout())); if (!console.promptConfirmation( String.format("Proceed with push to %s %s?", repoUrl, remotePush))) { console.warn("Migration aborted by user."); throw new ChangeRejectedException( "User aborted execution: did not confirm diff changes."); } } GitRevision head = scratchClone.resolveReference("HEAD"); SkylarkList<? extends Change<?>> originChanges = transformResult.getChanges().getCurrent(); // BeforePush will update existing PRs in github if skip push is not true writeHook.beforePush(scratchClone, messageInfo, skipPush, originChanges); if (skipPush) { console.infoFmt( "Git Destination: skipped push to remote. 
Check the local commits at %s", scratchClone.getGitDir()); return ImmutableList.of( new DestinationEffect( DestinationEffect.Type.CREATED, String.format( "Dry run commit '%s' created locally at %s", head, scratchClone.getGitDir()), originChanges, new DestinationEffect.DestinationRef(head.getSha1(), "commit", /*url=*/ null))); } String push = writeHook.getPushReference(getCompleteRef(remotePush), transformResult); console.progress(String.format("Git Destination: Pushing to %s %s", repoUrl, push)); checkCondition(!nonFastForwardPush || !Objects.equals(remoteFetch, remotePush), "non fast-forward push is only" + " allowed when fetch != push"); String serverResponse = generalOptions.repoTask( "push", () -> scratchClone.push() .withRefspecs(repoUrl, ImmutableList.of(scratchClone.createRefSpec( (nonFastForwardPush ? "+" : "") + "HEAD:" + push))) .run() ); return writeHook.afterPush(serverResponse, messageInfo, head, originChanges); } /** * Get the local {@link GitRepository} associated with the writer. * * Note that this is not a public interface and is subjec to change. */ public GitRepository getRepository(Console console) throws RepoException, ValidationException { return state.localRepo.load(console); } private void updateLocalBranchToBaseline(GitRepository repo, String baseline) throws RepoException { if (baseline != null && !repo.refExists(baseline)) { throw new RepoException("Cannot find baseline '" + baseline + (getLocalBranchRevision(repo) != null ? "' from fetch reference '" + remoteFetch + "'" : "' and fetch reference '" + remoteFetch + "' itself") + " in " + repoUrl + "."); } else if (baseline != null) { // Update the local branch to use the baseline repo.simpleCommand("update-ref", state.localBranch, baseline); } } @Nullable private GitRevision fetchFromRemote(Console console, GitRepository repo, String repoUrl, String fetch) throws RepoException, ValidationException { String completeFetchRef = getCompleteRef(fetch); try (ProfilerTask ignore = generalOptions.profiler().start("destination_fetch")){ console.progress("Git Destination: Fetching: " + repoUrl + " " + completeFetchRef); return repo.fetchSingleRef(repoUrl, completeFetchRef); } catch (CannotResolveRevisionException e) { String warning = format("Git Destination: '%s' doesn't exist in '%s'", completeFetchRef, repoUrl); checkCondition(force, "%s. Use %s flag if you want to push anyway", warning, FORCE); console.warn(warning); } return null; } private String getCompleteRef(String fetch) { // Assume that it is a branch. Doesn't work for tags. But we don't update tags (For now). return fetch.startsWith("refs/") ? fetch : "refs/heads/" + fetch; } private void configForPush(GitRepository repo, String repoUrl, String push) throws RepoException, ValidationException { if (localRepoPath != null) { // Configure the local repo to allow pushing to the ref manually outside of Copybara repo.simpleCommand("config", "remote.copybara_remote.url", repoUrl); repo.simpleCommand("config", "remote.copybara_remote.push", state.localBranch + ":" + push); repo.simpleCommand("config", "branch." 
+ state.localBranch + ".remote", "copybara_remote"); } if (!Strings.isNullOrEmpty(committerName)) { repo.simpleCommand("config", "user.name", committerName); } if (!Strings.isNullOrEmpty(committerEmail)) { repo.simpleCommand("config", "user.email", committerEmail); } verifyUserInfoConfigured(repo); } } @VisibleForTesting String getFetch() { return fetch; } @VisibleForTesting String getPush() { return push; } @Override public String getLabelNameWhenOrigin() { return GitRepository.GIT_ORIGIN_REV_ID; } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("repoUrl", repoUrl) .add("fetch", fetch) .add("push", push) .toString(); } /** * Not a public API. It is subject to change. */ public LazyResourceLoader<GitRepository> getLocalRepo() { return localRepo; } @Override public String getType() { return "git.destination"; } @Override public ImmutableSetMultimap<String, String> describe(Glob originFiles) { ImmutableSetMultimap.Builder<String, String> builder = new ImmutableSetMultimap.Builder<String, String>() .put("type", getType()) .put("url", repoUrl) .put("fetch", fetch) .put("push", push); builder.putAll(writerHook.describe()); return builder.build(); } }
java/com/google/copybara/git/GitDestination.java
/* * Copyright (C) 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.copybara.git; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.copybara.GeneralOptions.FORCE; import static com.google.copybara.LazyResourceLoader.memoized; import static com.google.copybara.exception.ValidationException.checkCondition; import static java.lang.String.format; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.MoreObjects; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSetMultimap; import com.google.common.flogger.FluentLogger; import com.google.copybara.Change; import com.google.copybara.ChangeMessage; import com.google.copybara.Destination; import com.google.copybara.DestinationEffect; import com.google.copybara.DestinationStatusVisitor; import com.google.copybara.Endpoint; import com.google.copybara.GeneralOptions; import com.google.copybara.LabelFinder; import com.google.copybara.LazyResourceLoader; import com.google.copybara.Revision; import com.google.copybara.TransformResult; import com.google.copybara.WriterContext; import com.google.copybara.exception.CannotResolveRevisionException; import com.google.copybara.exception.ChangeRejectedException; import com.google.copybara.exception.RepoException; import com.google.copybara.exception.ValidationException; import com.google.copybara.git.GitDestination.WriterImpl.WriteHook; import com.google.copybara.profiler.Profiler.ProfilerTask; import com.google.copybara.util.DiffUtil; import com.google.copybara.util.Glob; import com.google.copybara.util.console.Console; import com.google.devtools.build.lib.syntax.SkylarkList; import java.io.IOException; import java.nio.file.PathMatcher; import java.nio.file.Paths; import java.util.List; import java.util.Objects; import javax.annotation.Nullable; /** * A Git repository destination. 
*/ public final class GitDestination implements Destination<GitRevision> { private static final String ORIGIN_LABEL_SEPARATOR = ": "; static class MessageInfo { final ImmutableList<LabelFinder> labelsToAdd; MessageInfo(ImmutableList<LabelFinder> labelsToAdd) { this.labelsToAdd = checkNotNull(labelsToAdd); } } private static final FluentLogger logger = FluentLogger.forEnclosingClass(); private final String repoUrl; private final String fetch; private final String push; private final GitDestinationOptions destinationOptions; private final GitOptions gitOptions; private final GeneralOptions generalOptions; private final Iterable<GitIntegrateChanges> integrates; private final WriteHook writerHook; private final LazyResourceLoader<GitRepository> localRepo; GitDestination( String repoUrl, String fetch, String push, GitDestinationOptions destinationOptions, GitOptions gitOptions, GeneralOptions generalOptions, WriteHook writerHook, Iterable<GitIntegrateChanges> integrates) { this.repoUrl = checkNotNull(repoUrl); this.fetch = checkNotNull(fetch); this.push = checkNotNull(push); this.destinationOptions = checkNotNull(destinationOptions); this.gitOptions = Preconditions.checkNotNull(gitOptions); this.generalOptions = Preconditions.checkNotNull(generalOptions); this.integrates = Preconditions.checkNotNull(integrates); this.writerHook = checkNotNull(writerHook); this.localRepo = memoized(ignored -> destinationOptions.localGitRepo(repoUrl)); } /** * Throws an exception if the user.email or user.name Git configuration settings are not set. This * helps ensure that the committer field of generated commits is correct. */ private static void verifyUserInfoConfigured(GitRepository repo) throws RepoException, ValidationException { String output = repo.simpleCommand("config", "-l").getStdout(); boolean nameConfigured = false; boolean emailConfigured = false; for (String line : output.split("\n")) { if (line.startsWith("user.name=")) { nameConfigured = true; } else if (line.startsWith("user.email=")) { emailConfigured = true; } } checkCondition(nameConfigured && emailConfigured, "'user.name' and/or 'user.email' are not configured. Please run " + "`git config --global SETTING VALUE` to set them"); } @Override public Writer<GitRevision> newWriter(WriterContext writerContext) { WriterState state = new WriterState( localRepo, destinationOptions.getLocalBranch(push, writerContext.isDryRun())); return new WriterImpl<>( writerContext.isDryRun(), repoUrl, fetch, push, generalOptions, writerHook, state, destinationOptions.nonFastForwardPush, integrates, destinationOptions.lastRevFirstParent, destinationOptions.ignoreIntegrationErrors, destinationOptions.localRepoPath, destinationOptions.committerName, destinationOptions.committerEmail, destinationOptions.rebaseWhenBaseline(), gitOptions.visitChangePageSize); } /** * State to be maintained between writer instances. */ static class WriterState { boolean alreadyFetched; boolean firstWrite = true; final LazyResourceLoader<GitRepository> localRepo; final String localBranch; WriterState(LazyResourceLoader<GitRepository> localRepo, String localBranch) { this.localRepo = localRepo; this.localBranch = localBranch; } } /** * A writer for git.*destination destinations. Note that this is not a public interface and * shouldn't be used directly. 
*/ public static class WriterImpl<S extends WriterState> implements Writer<GitRevision> { final boolean skipPush; private final String repoUrl; private final String remoteFetch; private final String remotePush; private final boolean force; // Only use this console when you don't receive one as a parameter. private final Console baseConsole; private final GeneralOptions generalOptions; private final WriteHook writeHook; final S state; // We could get it from destinationOptions but this is in preparation of a GH PR destination. private final boolean nonFastForwardPush; private final Iterable<GitIntegrateChanges> integrates; private final boolean lastRevFirstParent; private final boolean ignoreIntegrationErrors; private final String localRepoPath; private final String committerName; private final String committerEmail; private final boolean rebase; private final int visitChangePageSize; /** * Create a new git.destination writer */ WriterImpl(boolean skipPush, String repoUrl, String remoteFetch, String remotePush, GeneralOptions generalOptions, WriteHook writeHook, S state, boolean nonFastForwardPush, Iterable<GitIntegrateChanges> integrates, boolean lastRevFirstParent, boolean ignoreIntegrationErrors, String localRepoPath, String committerName, String committerEmail, boolean rebase, int visitChangePageSize) { this.skipPush = skipPush; this.repoUrl = checkNotNull(repoUrl); this.remoteFetch = checkNotNull(remoteFetch); this.remotePush = checkNotNull(remotePush); this.force = generalOptions.isForced(); this.baseConsole = checkNotNull(generalOptions.console()); this.generalOptions = generalOptions; this.writeHook = checkNotNull(writeHook); this.state = checkNotNull(state); this.nonFastForwardPush = nonFastForwardPush; this.integrates = Preconditions.checkNotNull(integrates); this.lastRevFirstParent = lastRevFirstParent; this.ignoreIntegrationErrors = ignoreIntegrationErrors; this.localRepoPath = localRepoPath; this.committerName = committerName; this.committerEmail = committerEmail; this.rebase = rebase; this.visitChangePageSize = visitChangePageSize; } @Override public void visitChanges(@Nullable GitRevision start, ChangesVisitor visitor) throws RepoException, ValidationException { GitRepository repository = getRepository(baseConsole); try { fetchIfNeeded(repository, baseConsole); } catch (ValidationException e) { throw new CannotResolveRevisionException( "Cannot visit changes because fetch failed. Does the destination branch exist?", e); } GitRevision startRef = getLocalBranchRevision(repository); if (startRef == null) { return; } ChangeReader.Builder queryChanges = ChangeReader.Builder.forDestination(repository, baseConsole) .setVerbose(generalOptions.isVerbose()); GitVisitorUtil.visitChanges( start == null ? startRef : start, visitor, queryChanges, generalOptions, "destination", visitChangePageSize); } /** * Do a fetch iff we haven't done one already. Prevents doing unnecessary fetches. 
*/ private void fetchIfNeeded(GitRepository repo, Console console) throws RepoException, ValidationException { if (!state.alreadyFetched) { GitRevision revision = fetchFromRemote(console, repo, repoUrl, remoteFetch); if (revision != null) { repo.simpleCommand("branch", state.localBranch, revision.getSha1()); } state.alreadyFetched = true; } } @Nullable @Override public DestinationStatus getDestinationStatus(Glob destinationFiles, String labelName) throws RepoException, ValidationException { GitRepository repo = getRepository(baseConsole); try { fetchIfNeeded(repo, baseConsole); } catch (ValidationException e) { return null; } GitRevision startRef = getLocalBranchRevision(repo); if (startRef == null) { return null; } PathMatcher pathMatcher = destinationFiles.relativeTo(Paths.get("")); DestinationStatusVisitor visitor = new DestinationStatusVisitor(pathMatcher, labelName); ChangeReader.Builder changeReader = ChangeReader.Builder.forDestination(repo, baseConsole) .setVerbose(generalOptions.isVerbose()) .setFirstParent(lastRevFirstParent) .grep("^" + labelName + ORIGIN_LABEL_SEPARATOR); try { // Using same visitChangePageSize for now GitVisitorUtil.visitChanges( startRef, visitor, changeReader, generalOptions, "get_destination_status", visitChangePageSize); } catch (CannotResolveRevisionException e) { // TODO: handle return null; } return visitor.getDestinationStatus(); } @Override public Endpoint getFeedbackEndPoint(Console console) throws ValidationException { return writeHook.getFeedbackEndPoint(console); } @Nullable private GitRevision getLocalBranchRevision(GitRepository gitRepository) throws RepoException { try { return gitRepository.resolveReference(state.localBranch); } catch (CannotResolveRevisionException e) { if (force) { return null; } throw new RepoException(String.format("Could not find %s in %s and '%s' was not used", remoteFetch, repoUrl, GeneralOptions.FORCE)); } } @Override public boolean supportsHistory() { return true; } /** * A write hook allows us to customize the behavior or git.destination writer for other * implementations. */ public interface WriteHook { /** Customize the writer for a particular destination. */ MessageInfo generateMessageInfo(TransformResult transformResult) throws ValidationException, RepoException; /** * Validate or do modifications to the current change to be pushed. * * <p>{@code HEAD} commit should point to the commit to be pushed. Any change on the local * git repo should keep current commit as HEAD or do the proper modifications to make HEAD to * point to a new/modified changes(s). */ default void beforePush(GitRepository repo, MessageInfo messageInfo, boolean skipPush, List<? extends Change<?>> originChanges) throws RepoException, ValidationException { } /** * Construct the reference to push based on the pushToRefsFor reference. Implementations of * this method can change the reference to a different reference. */ String getPushReference(String pushToRefsFor, TransformResult transformResult); /** * Process the server response from the push command and compute the effects that happened */ ImmutableList<DestinationEffect> afterPush(String serverResponse, MessageInfo messageInfo, GitRevision pushedRevision, List<? 
extends Change<?>> originChanges); default Endpoint getFeedbackEndPoint(Console console) throws ValidationException { return Endpoint.NOOP_ENDPOINT; } default ImmutableSetMultimap<String, String> describe() { return ImmutableSetMultimap.of(); } } /** * A Write hook for standard git repositories */ public static class DefaultWriteHook implements WriteHook { @Override public MessageInfo generateMessageInfo(TransformResult transformResult) { Revision rev = transformResult.getCurrentRevision(); return new MessageInfo( transformResult.isSetRevId() ? ImmutableList.of(new LabelFinder( transformResult.getRevIdLabel() + ORIGIN_LABEL_SEPARATOR + rev.asString())) : ImmutableList.of()); } @Override public ImmutableList<DestinationEffect> afterPush(String serverResponse, MessageInfo messageInfo, GitRevision pushedRevision, List<? extends Change<?>> originChanges) { return ImmutableList.of( new DestinationEffect( DestinationEffect.Type.CREATED, String.format("Created revision %s", pushedRevision.getSha1()), originChanges, new DestinationEffect.DestinationRef( pushedRevision.getSha1(), "commit", /*url=*/ null))); } @Override public String getPushReference(String pushToRefsFor, TransformResult transformResult) { return pushToRefsFor; } } @Override public ImmutableList<DestinationEffect> write(TransformResult transformResult, Glob destinationFiles, Console console) throws ValidationException, RepoException, IOException { logger.atInfo().log("Exporting from %s to: %s", transformResult.getPath(), this); String baseline = transformResult.getBaseline(); GitRepository scratchClone = getRepository(console); fetchIfNeeded(scratchClone, console); console.progress("Git Destination: Checking out " + remoteFetch); GitRevision localBranchRevision = getLocalBranchRevision(scratchClone); updateLocalBranchToBaseline(scratchClone, baseline); if (state.firstWrite) { String reference = baseline != null ? baseline : state.localBranch; configForPush(getRepository(console), repoUrl, remotePush); if (!force && localBranchRevision == null) { throw new RepoException(String.format( "Cannot checkout '%s' from '%s'. Use '%s' if the destination is a new git repo or" + " you don't care about the destination current status", reference, repoUrl, GeneralOptions.FORCE)); } if (localBranchRevision != null) { scratchClone.simpleCommand("checkout", "-f", "-q", reference); } else { // Configure the commit to go to local branch instead of master. scratchClone.simpleCommand("symbolic-ref", "HEAD", getCompleteRef(state.localBranch)); } state.firstWrite = false; } else if (!skipPush) { // Should be a no-op, but an iterative migration could take several minutes between // migrations so lets fetch the latest first. 
fetchFromRemote(console, scratchClone, repoUrl, remoteFetch); } PathMatcher pathMatcher = destinationFiles.relativeTo(scratchClone.getWorkTree()); // Get the submodules before we stage them for deletion with // repo.simpleCommand(add --all) AddExcludedFilesToIndex excludedAdder = new AddExcludedFilesToIndex(scratchClone, pathMatcher); excludedAdder.findSubmodules(console); GitRepository alternate = scratchClone.withWorkTree(transformResult.getPath()); console.progress("Git Destination: Adding all files"); alternate.add().force().all().run(); console.progress("Git Destination: Excluding files"); excludedAdder.add(); console.progress("Git Destination: Creating a local commit"); MessageInfo messageInfo = writeHook.generateMessageInfo(transformResult); ChangeMessage msg = ChangeMessage.parseMessage(transformResult.getSummary()); for (LabelFinder label : messageInfo.labelsToAdd) { msg = msg.withNewOrReplacedLabel(label.getName(), label.getSeparator(), label.getValue()); } String commitMessage = msg.toString(); alternate.commit( transformResult.getAuthor().toString(), transformResult.getTimestamp(), commitMessage); ValidationException.checkCondition(!transformResult.getSummary().trim().isEmpty(), "Change description is empty."); for (GitIntegrateChanges integrate : integrates) { integrate.run(alternate, generalOptions, messageInfo, path -> !pathMatcher.matches(scratchClone.getWorkTree().resolve(path)), transformResult, ignoreIntegrationErrors); } // Don't leave unstaged/untracked files in the work-tree. This is a problem for rebase // and in general any inspection of the directory after Copybara execution. // Clean unstaged: scratchClone.simpleCommand("reset", "--hard"); // ...and untracked ones: scratchClone.forceClean(); if (baseline != null && rebase) { // Note that it is a different work-tree from the previous reset alternate.simpleCommand("reset", "--hard"); alternate.rebase(localBranchRevision.getSha1()); } if (localRepoPath != null) { scratchClone.simpleCommand("checkout", state.localBranch); } if (transformResult.isAskForConfirmation()) { // The git repo contains the staged changes at this point. Git diff writes to Stdout console.info(DiffUtil.colorize( console, scratchClone.simpleCommand("show", "HEAD").getStdout())); if (!console.promptConfirmation( String.format("Proceed with push to %s %s?", repoUrl, remotePush))) { console.warn("Migration aborted by user."); throw new ChangeRejectedException( "User aborted execution: did not confirm diff changes."); } } GitRevision head = scratchClone.resolveReference("HEAD"); SkylarkList<? extends Change<?>> originChanges = transformResult.getChanges().getCurrent(); // BeforePush will update existing PRs in github if skip push is not true writeHook.beforePush(scratchClone, messageInfo, skipPush, originChanges); if (skipPush) { console.infoFmt( "Git Destination: skipped push to remote. 
Check the local commits at %s", scratchClone.getGitDir()); return ImmutableList.of( new DestinationEffect( DestinationEffect.Type.CREATED, String.format( "Dry run commit '%s' created locally at %s", head, scratchClone.getGitDir()), originChanges, new DestinationEffect.DestinationRef(head.getSha1(), "commit", /*url=*/ null))); } String push = writeHook.getPushReference(getCompleteRef(remotePush), transformResult); console.progress(String.format("Git Destination: Pushing to %s %s", repoUrl, push)); checkCondition(!nonFastForwardPush || !Objects.equals(remoteFetch, remotePush), "non fast-forward push is only" + " allowed when fetch != push"); String serverResponse = generalOptions.repoTask( "push", () -> scratchClone.push() .withRefspecs(repoUrl, ImmutableList.of(scratchClone.createRefSpec( (nonFastForwardPush ? "+" : "") + "HEAD:" + push))) .run() ); return writeHook.afterPush(serverResponse, messageInfo, head, originChanges); } /** * Get the local {@link GitRepository} associated with the writer. * * Note that this is not a public interface and is subjec to change. */ public GitRepository getRepository(Console console) throws RepoException, ValidationException { return state.localRepo.load(console); } private void updateLocalBranchToBaseline(GitRepository repo, String baseline) throws RepoException { if (baseline != null && !repo.refExists(baseline)) { throw new RepoException("Cannot find baseline '" + baseline + (getLocalBranchRevision(repo) != null ? "' from fetch reference '" + remoteFetch + "'" : "' and fetch reference '" + remoteFetch + "' itself") + " in " + repoUrl + "."); } else if (baseline != null) { // Update the local branch to use the baseline repo.simpleCommand("update-ref", state.localBranch, baseline); } } @Nullable private GitRevision fetchFromRemote(Console console, GitRepository repo, String repoUrl, String fetch) throws RepoException, ValidationException { String completeFetchRef = getCompleteRef(fetch); try (ProfilerTask ignore = generalOptions.profiler().start("destination_fetch")){ console.progress("Git Destination: Fetching: " + repoUrl + " " + completeFetchRef); return repo.fetchSingleRef(repoUrl, completeFetchRef); } catch (CannotResolveRevisionException e) { String warning = format("Git Destination: '%s' doesn't exist in '%s'", completeFetchRef, repoUrl); checkCondition(force, "%s. Use %s flag if you want to push anyway", warning, FORCE); console.warn(warning); } return null; } private String getCompleteRef(String fetch) { // Assume that it is a branch. Doesn't work for tags. But we don't update tags (For now). return fetch.startsWith("refs/") ? fetch : "refs/heads/" + fetch; } private void configForPush(GitRepository repo, String repoUrl, String push) throws RepoException, ValidationException { if (localRepoPath != null) { // Configure the local repo to allow pushing to the ref manually outside of Copybara repo.simpleCommand("config", "remote.copybara_remote.url", repoUrl); repo.simpleCommand("config", "remote.copybara_remote.push", state.localBranch + ":" + push); repo.simpleCommand("config", "branch." 
+ state.localBranch + ".remote", "copybara_remote"); } if (!Strings.isNullOrEmpty(committerName)) { repo.simpleCommand("config", "user.name", committerName); } if (!Strings.isNullOrEmpty(committerEmail)) { repo.simpleCommand("config", "user.email", committerEmail); } verifyUserInfoConfigured(repo); } } @VisibleForTesting String getFetch() { return fetch; } @VisibleForTesting String getPush() { return push; } @Override public String getLabelNameWhenOrigin() { return GitRepository.GIT_ORIGIN_REV_ID; } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("repoUrl", repoUrl) .add("fetch", fetch) .add("push", push) .toString(); } /** * Not a public API. It is subject to change. */ public LazyResourceLoader<GitRepository> getLocalRepo() { return localRepo; } @Override public String getType() { return "git.destination"; } @Override public ImmutableSetMultimap<String, String> describe(Glob originFiles) { ImmutableSetMultimap.Builder<String, String> builder = new ImmutableSetMultimap.Builder<String, String>() .put("type", getType()) .put("url", repoUrl) .put("fetch", fetch) .put("push", push); builder.putAll(writerHook.describe()); return builder.build(); } }
Add profiling for GitDestination add/exclude files

In some cases we can run a lot of git commands for excluded files when there are thousands of files (because we need batching). This should give more visibility to those times in the profiler.

BUG=124371270
PiperOrigin-RevId: 234192373
Change-Id: I28bc18c8f9c0a133a4361d55ae01e8582e59d554
java/com/google/copybara/git/GitDestination.java
Add profiling for GitDestination add/exclude files
<ide><path>ava/com/google/copybara/git/GitDestination.java <ide> public ImmutableList<DestinationEffect> write(TransformResult transformResult, <ide> Glob destinationFiles, Console console) <ide> throws ValidationException, RepoException, IOException { <del> logger.atInfo().log("Exporting from %s to: %s", transformResult.getPath(), this); <add> logger.atInfo().log( <add> "Exporting from %s to: url=%s ref=%s", transformResult.getPath(), repoUrl, remotePush); <ide> String baseline = transformResult.getBaseline(); <ide> <ide> GitRepository scratchClone = getRepository(console); <ide> GitRepository alternate = scratchClone.withWorkTree(transformResult.getPath()); <ide> <ide> console.progress("Git Destination: Adding all files"); <del> alternate.add().force().all().run(); <add> try (ProfilerTask ignored = generalOptions.profiler().start("add_files")) { <add> alternate.add().force().all().run(); <add> } <ide> <ide> console.progress("Git Destination: Excluding files"); <del> excludedAdder.add(); <add> try (ProfilerTask ignored = generalOptions.profiler().start("exclude_files")) { <add> excludedAdder.add(); <add> } <ide> <ide> console.progress("Git Destination: Creating a local commit"); <ide> MessageInfo messageInfo = writeHook.generateMessageInfo(transformResult);
JavaScript
mit
20015de4b639c32fde162a1d2fc7d31f2752250d
0
Tangurin/SwiperHandler
(function () { 'use strict'; var Swiper = require('swiper'); /*=========================== SwiperHandler ===========================*/ var SwiperHandler = { window: null, swipers: {}, initialized: false, initialize: function() { if (SwiperHandler.initialized) { return false; } SwiperHandler.window = $(window); var swiperId = 1; $('.initSwiper').each(function() { var $this = $(this); var idAttribute = 'swiperId-'+ swiperId; var selector = '#'+ idAttribute; $this.attr('id', idAttribute); SwiperHandler.swipers[swiperId] = { selector: selector, swiperSelector: selector +' .swiper-container', element: $this, settings: eval('(' + $this.attr('data-swiper-settings') + ')'), instance: null, currentBreakpoint: 0, amountOfSlides: $('.swiper-slide', $this).length, }; swiperId++; }); SwiperHandler.buildAll(); SwiperHandler.window.resize(function() { clearTimeout(SwiperHandler.window.resizedFinished); SwiperHandler.window.resizedFinished = setTimeout(SwiperHandler.rebuildOnResize, 250); }); SwiperHandler.initialized = true; }, rebuildOnResize: function() { SwiperHandler._each(function(swiperId, swiper) { var swiper = SwiperHandler.swipers[swiperId]; var breakpoint = SwiperHandler.getBreakpoint(swiperId); if (swiper.currentBreakpoint != breakpoint) { SwiperHandler.build(swiperId); } }); SwiperHandler.syncSwipers(); }, buildAll: function() { SwiperHandler._each(function(swiperId, swiper) { SwiperHandler.build(swiperId); }); SwiperHandler.syncSwipers(); }, build: function(id) { var swiper = SwiperHandler.swipers[id]; if (swiper.instance != null) { SwiperHandler.destroy(id); } swiper.instance = new Swiper(swiper.swiperSelector, SwiperHandler.getSettings(id)); }, syncSwipers: function() { SwiperHandler._each(function(swiperId, swiper) { var syncGroup = swiper.settings.syncGroup; if (syncGroup) { SwiperHandler._each(function(iterationSwiperId, iterationSwiper) { if (swiperId != iterationSwiperId && iterationSwiper.settings.syncGroup == syncGroup) { swiper.instance.params.control = iterationSwiper.instance; } }); } }); }, getSettings: function(id) { var swiper = SwiperHandler.swipers[id]; var settings = swiper.settings || {}; var newSettings = settings.default || {}; var breakpoint = SwiperHandler.getBreakpoint(id); var breakpointSettings = {}; swiper.currentBreakpoint = breakpoint; if (breakpoint > 0) { breakpointSettings = swiper.settings.breakpoints[breakpoint]; } newSettings = $.extend({}, newSettings, breakpointSettings); if (settings.centerSlidesIfTooFew && swiper.amountOfSlides < newSettings.slidesPerView) { newSettings.centeredSlides = true; } //Add empty navigation if not exists newSettings.navigation = newSettings.navigation || {}; //Add current swiper-selector before prev/next nav button newSettings.navigation.prevButton = swiper.selector + ' ' + (newSettings.navigation.prevEl || '.swiper-button-prev'); newSettings.navigation.nextButton = swiper.selector + ' ' + (newSettings.navigation.nextEl || '.swiper-button-next'); return newSettings; }, getBreakpoint: function(id) { var swiper = SwiperHandler.swipers[id]; for (width in swiper.settings.breakpoints) { if (SwiperHandler.window.width() <= width) { return width; } } return 0; }, destroyAll: function() { SwiperHandler._each(function(swiperId, swiper) { SwiperHandler.destroy(swiperId); }); }, destroy: function(id) { if (SwiperHandler.swipers[id].instance != null) { SwiperHandler.swipers[id].instance.destroy(false, true); } }, _each: function(callback) { if (typeof callback != 'function') { return false; } $.each(SwiperHandler.swipers, 
function(swiperId, swiper) { callback(swiperId, swiper); }); }, }; window.SwiperHandler = SwiperHandler; })(); /*=========================== SwiperHandler AMD Export ===========================*/ if (typeof(module) !== 'undefined') { module.exports = window.SwiperHandler; } else if (typeof define === 'function' && define.amd) { define([], function () { 'use strict'; return window.SwiperHandler; }); }
SwiperHandler.js
(function () { 'use strict'; var Swiper = require('swiper'); /*=========================== SwiperHandler ===========================*/ var SwiperHandler = { window: null, swipers: {}, initialized: false, initialize: function() { if (SwiperHandler.initialized) { return false; } SwiperHandler.window = $(window); var swiperId = 1; $('.initSwiper').each(function() { var $this = $(this); var idAttribute = 'swiperId-'+ swiperId; var selector = '#'+ idAttribute; $this.attr('id', idAttribute); SwiperHandler.swipers[swiperId] = { selector: selector, swiperSelector: selector +' .swiper-container', element: $this, settings: eval('(' + $this.attr('data-swiper-settings') + ')'), instance: null, currentBreakpoint: 0, amountOfSlides: $('.swiper-slide', $this).length, }; swiperId++; }); SwiperHandler.buildAll(); SwiperHandler.window.resize(function() { clearTimeout(SwiperHandler.window.resizedFinished); SwiperHandler.window.resizedFinished = setTimeout(SwiperHandler.rebuildOnResize, 250); }); SwiperHandler.initialized = true; }, rebuildOnResize: function() { SwiperHandler._each(function(swiperId, swiper) { var swiper = SwiperHandler.swipers[swiperId]; var breakpoint = SwiperHandler.getBreakpoint(swiperId); if (swiper.currentBreakpoint != breakpoint) { SwiperHandler.build(swiperId); } }); SwiperHandler.syncSwipers(); }, buildAll: function() { SwiperHandler._each(function(swiperId, swiper) { SwiperHandler.build(swiperId); }); SwiperHandler.syncSwipers(); }, build: function(id) { var swiper = SwiperHandler.swipers[id]; if (swiper.instance != null) { SwiperHandler.destroy(id); } swiper.instance = new Swiper(swiper.swiperSelector, SwiperHandler.getSettings(id)); }, syncSwipers: function() { SwiperHandler._each(function(swiperId, swiper) { var syncGroup = swiper.settings.syncGroup; if (syncGroup) { SwiperHandler._each(function(iterationSwiperId, iterationSwiper) { if (swiperId != iterationSwiperId && iterationSwiper.settings.syncGroup == syncGroup) { swiper.instance.params.control = iterationSwiper.instance; } }); } }); }, getSettings: function(id) { var swiper = SwiperHandler.swipers[id]; var settings = swiper.settings || {}; var newSettings = settings.default || {}; var breakpoint = SwiperHandler.getBreakpoint(id); var breakpointSettings = {}; swiper.currentBreakpoint = breakpoint; if (breakpoint > 0) { breakpointSettings = swiper.settings.breakpoints[breakpoint]; } newSettings = $.extend({}, newSettings, breakpointSettings); if (settings.centerSlidesIfTooFew && swiper.amountOfSlides < newSettings.slidesPerView) { newSettings.centeredSlides = true; } //Add empty navigation if not exists newSettings.navigation = newSettings.navigation || {}; //Add current swiper-selector before prev/next nav button newSettings.navigation.prevButton = swiper.selector + ' ' + (newSettings.navigation.prevEl || '.swiper-button-prev'); newSettings.navigation.nextButton = swiper.selector + ' ' + (newSettings.navigation.nextEl || '.swiper-button-next'); console.log(newSettings); return newSettings; }, getBreakpoint: function(id) { var swiper = SwiperHandler.swipers[id]; for (width in swiper.settings.breakpoints) { if (SwiperHandler.window.width() <= width) { return width; } } return 0; }, destroyAll: function() { SwiperHandler._each(function(swiperId, swiper) { SwiperHandler.destroy(swiperId); }); }, destroy: function(id) { if (SwiperHandler.swipers[id].instance != null) { SwiperHandler.swipers[id].instance.destroy(false, true); } }, _each: function(callback) { if (typeof callback != 'function') { return false; } 
$.each(SwiperHandler.swipers, function(swiperId, swiper) { callback(swiperId, swiper); }); }, }; window.SwiperHandler = SwiperHandler; })(); /*=========================== SwiperHandler AMD Export ===========================*/ if (typeof(module) !== 'undefined') { module.exports = window.SwiperHandler; } else if (typeof define === 'function' && define.amd) { define([], function () { 'use strict'; return window.SwiperHandler; }); }
Remove console.log
SwiperHandler.js
Remove console.log
<ide><path>SwiperHandler.js <ide> newSettings.navigation.prevButton = swiper.selector + ' ' + (newSettings.navigation.prevEl || '.swiper-button-prev'); <ide> newSettings.navigation.nextButton = swiper.selector + ' ' + (newSettings.navigation.nextEl || '.swiper-button-next'); <ide> <del> console.log(newSettings); <del> <ide> return newSettings; <ide> }, <ide> getBreakpoint: function(id) {
Java
apache-2.0
906a1f561a404184404ceb9a9e4a8ff18798e715
0
spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.scheduling.support; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import java.util.TimeZone; import java.util.stream.Stream; import org.joda.time.LocalDateTime; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.springframework.scheduling.TriggerContext; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.junit.jupiter.params.provider.Arguments.arguments; /** * Unit tests for {@link CronTrigger}. * * @author Dave Syer * @author Mark Fisher * @author Juergen Hoeller * @author Sam Brannen */ class CronTriggerTests { private final Calendar calendar = new GregorianCalendar(); private void setUp(LocalDateTime localDateTime, TimeZone timeZone) { this.calendar.setTimeZone(timeZone); this.calendar.setTime(localDateTime.toDate()); roundup(this.calendar); } @ParameterizedCronTriggerTest void matchAll(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * * * *", timeZone); TriggerContext context = getTriggerContext(localDateTime.toDate()); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void matchLastSecond(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * * * *", timeZone); GregorianCalendar calendar = new GregorianCalendar(); calendar.set(Calendar.SECOND, 58); assertMatchesNextSecond(trigger, calendar); } @ParameterizedCronTriggerTest void matchSpecificSecond(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("10 * * * * *", timeZone); GregorianCalendar calendar = new GregorianCalendar(); calendar.set(Calendar.SECOND, 9); assertMatchesNextSecond(trigger, calendar); } @ParameterizedCronTriggerTest void incrementSecondByOne(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("11 * * * * *", timeZone); this.calendar.set(Calendar.SECOND, 10); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.SECOND, 1); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void incrementSecondWithPreviousExecutionTooEarly(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("11 * * * * *", timeZone); this.calendar.set(Calendar.SECOND, 
11); SimpleTriggerContext context = new SimpleTriggerContext(); context.update(this.calendar.getTime(), new Date(this.calendar.getTimeInMillis() - 100), new Date(this.calendar.getTimeInMillis() - 90)); this.calendar.add(Calendar.MINUTE, 1); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void incrementSecondAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("10 * * * * *", timeZone); this.calendar.set(Calendar.SECOND, 11); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.SECOND, 59); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void secondRange(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("10-15 * * * * *", timeZone); this.calendar.set(Calendar.SECOND, 9); assertMatchesNextSecond(trigger, this.calendar); this.calendar.set(Calendar.SECOND, 14); assertMatchesNextSecond(trigger, this.calendar); } @ParameterizedCronTriggerTest void incrementMinute(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 * * * * *", timeZone); this.calendar.set(Calendar.MINUTE, 10); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.MINUTE, 1); this.calendar.set(Calendar.SECOND, 0); TriggerContext context1 = getTriggerContext(localDate); localDate = trigger.nextExecutionTime(context1); assertThat(localDate).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.MINUTE, 1); TriggerContext context2 = getTriggerContext(localDate); localDate = trigger.nextExecutionTime(context2); assertThat(localDate).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void incrementMinuteByOne(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 11 * * * *", timeZone); this.calendar.set(Calendar.MINUTE, 10); TriggerContext context = getTriggerContext(this.calendar.getTime()); this.calendar.add(Calendar.MINUTE, 1); this.calendar.set(Calendar.SECOND, 0); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void incrementMinuteAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 10 * * * *", timeZone); this.calendar.set(Calendar.MINUTE, 11); this.calendar.set(Calendar.SECOND, 0); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.MINUTE, 59); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void incrementHour(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 * * * *", timeZone); this.calendar.set(Calendar.MONTH, 9); this.calendar.set(Calendar.DAY_OF_MONTH, 30); this.calendar.set(Calendar.HOUR_OF_DAY, 11); this.calendar.set(Calendar.MINUTE, 1); this.calendar.set(Calendar.SECOND, 0); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.HOUR_OF_DAY, 12); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); 
assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.HOUR_OF_DAY, 13); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void incrementHourAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 * * * *", timeZone); this.calendar.set(Calendar.MONTH, 9); this.calendar.set(Calendar.DAY_OF_MONTH, 10); this.calendar.set(Calendar.HOUR_OF_DAY, 23); this.calendar.set(Calendar.MINUTE, 1); this.calendar.set(Calendar.SECOND, 0); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.DAY_OF_MONTH, 11); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.HOUR_OF_DAY, 1); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void incrementDayOfMonth(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 * * *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 1); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context1 = getTriggerContext(localDate); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); assertThat(this.calendar.get(Calendar.DAY_OF_MONTH)).isEqualTo(2); this.calendar.add(Calendar.DAY_OF_MONTH, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); assertThat(this.calendar.get(Calendar.DAY_OF_MONTH)).isEqualTo(3); } @ParameterizedCronTriggerTest void incrementDayOfMonthByOne(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * 10 * *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 9); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void incrementDayOfMonthAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * 10 * *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 11); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.MONTH, 1); this.calendar.set(Calendar.DAY_OF_MONTH, 10); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void dailyTriggerInShortMonth(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, 
timeZone); CronTrigger trigger = new CronTrigger("0 0 0 * * *", timeZone); this.calendar.set(Calendar.MONTH, 8); // September: 30 days this.calendar.set(Calendar.DAY_OF_MONTH, 30); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.MONTH, 9); // October this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); this.calendar.set(Calendar.DAY_OF_MONTH, 1); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.DAY_OF_MONTH, 2); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void dailyTriggerInLongMonth(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 * * *", timeZone); this.calendar.set(Calendar.MONTH, 7); // August: 31 days and not a daylight saving boundary this.calendar.set(Calendar.DAY_OF_MONTH, 30); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); this.calendar.set(Calendar.DAY_OF_MONTH, 31); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.MONTH, 8); // September this.calendar.set(Calendar.DAY_OF_MONTH, 1); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void dailyTriggerOnDaylightSavingBoundary(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 * * *", timeZone); this.calendar.set(Calendar.MONTH, 9); // October: 31 days and a daylight saving boundary in CET this.calendar.set(Calendar.DAY_OF_MONTH, 30); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); this.calendar.set(Calendar.DAY_OF_MONTH, 31); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.MONTH, 10); // November this.calendar.set(Calendar.DAY_OF_MONTH, 1); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void incrementMonth(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 1 * *", timeZone); this.calendar.set(Calendar.MONTH, 9); this.calendar.set(Calendar.DAY_OF_MONTH, 30); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); this.calendar.set(Calendar.MONTH, 10); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.MONTH, 11); TriggerContext context2 = getTriggerContext(localDate); 
assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void incrementMonthAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 1 * *", timeZone); this.calendar.set(Calendar.MONTH, 11); this.calendar.set(Calendar.DAY_OF_MONTH, 31); this.calendar.set(Calendar.YEAR, 2010); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); this.calendar.set(Calendar.MONTH, 0); this.calendar.set(Calendar.YEAR, 2011); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.MONTH, 1); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void monthlyTriggerInLongMonth(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 31 * *", timeZone); this.calendar.set(Calendar.MONTH, 9); this.calendar.set(Calendar.DAY_OF_MONTH, 30); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.DAY_OF_MONTH, 31); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void monthlyTriggerInShortMonth(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 1 * *", timeZone); this.calendar.set(Calendar.MONTH, 9); this.calendar.set(Calendar.DAY_OF_MONTH, 30); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.MONTH, 10); this.calendar.set(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void incrementDayOfWeekByOne(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * * * 2", timeZone); this.calendar.set(Calendar.DAY_OF_WEEK, 2); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.DAY_OF_WEEK, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); assertThat(this.calendar.get(Calendar.DAY_OF_WEEK)).isEqualTo(Calendar.TUESDAY); } @ParameterizedCronTriggerTest void incrementDayOfWeekAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * * * 2", timeZone); this.calendar.set(Calendar.DAY_OF_WEEK, 4); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.DAY_OF_MONTH, 6); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = 
getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); assertThat(this.calendar.get(Calendar.DAY_OF_WEEK)).isEqualTo(Calendar.TUESDAY); } @ParameterizedCronTriggerTest void specificMinuteSecond(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("55 5 * * * *", timeZone); this.calendar.set(Calendar.MINUTE, 4); this.calendar.set(Calendar.SECOND, 54); Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); this.calendar.add(Calendar.MINUTE, 1); this.calendar.set(Calendar.SECOND, 55); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.HOUR, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void specificHourSecond(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("55 * 10 * * *", timeZone); this.calendar.set(Calendar.HOUR_OF_DAY, 9); this.calendar.set(Calendar.SECOND, 54); Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); this.calendar.add(Calendar.HOUR_OF_DAY, 1); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 55); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.MINUTE, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void specificMinuteHour(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* 5 10 * * *", timeZone); this.calendar.set(Calendar.MINUTE, 4); this.calendar.set(Calendar.HOUR_OF_DAY, 9); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.MINUTE, 1); this.calendar.add(Calendar.HOUR_OF_DAY, 1); this.calendar.set(Calendar.SECOND, 0); TriggerContext context1 = getTriggerContext(localDate); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); // next trigger is in one second because second is wildcard this.calendar.add(Calendar.SECOND, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void specificDayOfMonthSecond(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("55 * * 3 * *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 2); this.calendar.set(Calendar.SECOND, 54); Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); this.calendar.add(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 55); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.MINUTE, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual = trigger.nextExecutionTime(context2); 
assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void specificDate(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * 3 11 *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 2); this.calendar.set(Calendar.MONTH, 9); Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); this.calendar.add(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MONTH, 10); // 10=November this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.SECOND, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void nonExistentSpecificDate(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); // TODO: maybe try and detect this as a special case in parser? CronTrigger trigger = new CronTrigger("0 0 0 31 6 *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 10); this.calendar.set(Calendar.MONTH, 2); Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context1)).isNull(); } @ParameterizedCronTriggerTest void leapYearSpecificDate(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 29 2 *", timeZone); this.calendar.set(Calendar.YEAR, 2007); this.calendar.set(Calendar.DAY_OF_MONTH, 10); this.calendar.set(Calendar.MONTH, 1); // 2=February Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); this.calendar.set(Calendar.YEAR, 2008); this.calendar.set(Calendar.DAY_OF_MONTH, 29); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.YEAR, 4); TriggerContext context2 = getTriggerContext(localDate); Object actual = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void weekDaySequence(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 7 ? 
* MON-FRI", timeZone); // This is a Saturday this.calendar.set(2009, 8, 26); Date localDate = this.calendar.getTime(); // 7 am is the trigger time this.calendar.set(Calendar.HOUR_OF_DAY, 7); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); // Add two days because we start on Saturday this.calendar.add(Calendar.DAY_OF_MONTH, 2); TriggerContext context1 = getTriggerContext(localDate); Object actual2 = localDate = trigger.nextExecutionTime(context1); assertThat(actual2).isEqualTo(this.calendar.getTime()); // Next day is a week day so add one this.calendar.add(Calendar.DAY_OF_MONTH, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual1 = localDate = trigger.nextExecutionTime(context2); assertThat(actual1).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.DAY_OF_MONTH, 1); TriggerContext context3 = getTriggerContext(localDate); Object actual = trigger.nextExecutionTime(context3); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void dayOfWeekIndifferent(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * 2 * *", timeZone); CronTrigger trigger2 = new CronTrigger("* * * 2 * ?", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void secondIncrementer(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("57,59 * * * * *", timeZone); CronTrigger trigger2 = new CronTrigger("57/2 * * * * *", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void secondIncrementerWithRange(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("1,3,5 * * * * *", timeZone); CronTrigger trigger2 = new CronTrigger("1-6/2 * * * * *", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void hourIncrementer(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * 4,8,12,16,20 * * *", timeZone); CronTrigger trigger2 = new CronTrigger("* * 4/4 * * *", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void dayNames(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * * * 0-6", timeZone); CronTrigger trigger2 = new CronTrigger("* * * * * TUE,WED,THU,FRI,SAT,SUN,MON", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void sundayIsZero(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * * * 0", timeZone); CronTrigger trigger2 = new CronTrigger("* * * * * SUN", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void sundaySynonym(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * * * 0", timeZone); CronTrigger trigger2 = new CronTrigger("* * * * * 7", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void monthNames(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * * 1-12 *", timeZone); CronTrigger trigger2 = new CronTrigger("* * * * FEB,JAN,MAR,APR,MAY,JUN,JUL,AUG,SEP,OCT,NOV,DEC *", timeZone); 
assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void monthNamesMixedCase(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * * 2 *", timeZone); CronTrigger trigger2 = new CronTrigger("* * * * Feb *", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void secondInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("77 * * * * *", timeZone)); } @ParameterizedCronTriggerTest void secondRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("44-77 * * * * *", timeZone)); } @ParameterizedCronTriggerTest void minuteInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* 77 * * * *", timeZone)); } @ParameterizedCronTriggerTest void minuteRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* 44-77 * * * *", timeZone)); } @ParameterizedCronTriggerTest void hourInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * 27 * * *", timeZone)); } @ParameterizedCronTriggerTest void hourRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * 23-28 * * *", timeZone)); } @ParameterizedCronTriggerTest void dayInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * * 45 * *", timeZone)); } @ParameterizedCronTriggerTest void dayRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * * 28-45 * *", timeZone)); } @ParameterizedCronTriggerTest void monthInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("0 0 0 25 13 ?", timeZone)); } @ParameterizedCronTriggerTest void monthInvalidTooSmall(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("0 0 0 25 0 ?", timeZone)); } @ParameterizedCronTriggerTest void dayOfMonthInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("0 0 0 32 12 ?", timeZone)); } @ParameterizedCronTriggerTest void monthRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * * * 11-13 *", timeZone)); } @ParameterizedCronTriggerTest void whitespace(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * * 1 *", timeZone); CronTrigger trigger2 = new CronTrigger("* * * * 1 *", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void monthSequence(LocalDateTime localDateTime, 
TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 30 23 30 1/3 ?", timeZone); this.calendar.set(2010, 11, 30); Date localDate = this.calendar.getTime(); // set expected next trigger time this.calendar.set(Calendar.HOUR_OF_DAY, 23); this.calendar.set(Calendar.MINUTE, 30); this.calendar.set(Calendar.SECOND, 0); this.calendar.add(Calendar.MONTH, 1); TriggerContext context1 = getTriggerContext(localDate); Object actual2 = localDate = trigger.nextExecutionTime(context1); assertThat(actual2).isEqualTo(this.calendar.getTime()); // Next trigger is 3 months later this.calendar.add(Calendar.MONTH, 3); TriggerContext context2 = getTriggerContext(localDate); Object actual1 = localDate = trigger.nextExecutionTime(context2); assertThat(actual1).isEqualTo(this.calendar.getTime()); // Next trigger is 3 months later this.calendar.add(Calendar.MONTH, 3); TriggerContext context3 = getTriggerContext(localDate); Object actual = trigger.nextExecutionTime(context3); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void daylightSavingMissingHour(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); // This trigger has to be somewhere between 2:00 AM and 3:00 AM, so we // use a cron expression for 2:10 AM every day. CronTrigger trigger = new CronTrigger("0 10 2 * * *", timeZone); // 2:00 AM on March 31, 2013: start of Daylight Saving Time for CET in 2013. // Setting up last completion: // - PST: Sun Mar 31 10:10:54 CEST 2013 // - CET: Sun Mar 31 01:10:54 CET 2013 this.calendar.set(Calendar.DAY_OF_MONTH, 31); this.calendar.set(Calendar.MONTH, Calendar.MARCH); this.calendar.set(Calendar.YEAR, 2013); this.calendar.set(Calendar.HOUR_OF_DAY, 1); this.calendar.set(Calendar.MINUTE, 10); // changing to any minute from 0-9 causes the test to fail for CET.
this.calendar.set(Calendar.SECOND, 54); Date lastCompletionTime = this.calendar.getTime(); // Setting up expected next execution time: // - PST: Sun Mar 31 11:10:00 CEST 2013 // - CET: Mon Apr 01 02:10:00 CEST 2013 if (timeZone.equals(TimeZone.getTimeZone("CET"))) { // Clocks go forward an hour so 2am doesn't exist in CET for this localDateTime this.calendar.add(Calendar.DAY_OF_MONTH, 1); } this.calendar.add(Calendar.HOUR_OF_DAY, 1); this.calendar.set(Calendar.MINUTE, 10); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(lastCompletionTime); Object nextExecutionTime = trigger.nextExecutionTime(context); assertThat(nextExecutionTime).isEqualTo(this.calendar.getTime()); } private static void roundup(Calendar calendar) { calendar.add(Calendar.SECOND, 1); calendar.set(Calendar.MILLISECOND, 0); } private static void assertMatchesNextSecond(CronTrigger trigger, Calendar calendar) { Date localDateTime = calendar.getTime(); roundup(calendar); TriggerContext context = getTriggerContext(localDateTime); assertThat(trigger.nextExecutionTime(context)).isEqualTo(calendar.getTime()); } private static TriggerContext getTriggerContext(Date lastCompletionTime) { SimpleTriggerContext context = new SimpleTriggerContext(); context.update(null, null, lastCompletionTime); return context; } @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) @ParameterizedTest(name = "[{index}] localDateTime[{0}], time zone[{1}]") @MethodSource("parameters") @interface ParameterizedCronTriggerTest { } static Stream<Arguments> parameters() { return Stream.of( arguments(LocalDateTime.now(), TimeZone.getTimeZone("PST")), arguments(LocalDateTime.now(), TimeZone.getTimeZone("CET")) ); } }
spring-context/src/test/java/org/springframework/scheduling/support/CronTriggerTests.java
/* * Copyright 2002-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.scheduling.support; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.util.Calendar; import java.util.Date; import java.util.GregorianCalendar; import java.util.TimeZone; import java.util.stream.Stream; import org.joda.time.LocalDateTime; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import org.springframework.scheduling.TriggerContext; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; import static org.junit.jupiter.params.provider.Arguments.arguments; /** * Unit tests for {@link CronTrigger}. * * @author Dave Syer * @author Mark Fisher * @author Juergen Hoeller * @author Sam Brannen */ class CronTriggerTests { private final Calendar calendar = new GregorianCalendar(); private void setUp(LocalDateTime localDateTime, TimeZone timeZone) { this.calendar.setTimeZone(timeZone); this.calendar.setTime(localDateTime.toDate()); roundup(this.calendar); } @ParameterizedCronTriggerTest void testMatchAll(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * * * *", timeZone); TriggerContext context = getTriggerContext(localDateTime.toDate()); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testMatchLastSecond(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * * * *", timeZone); GregorianCalendar calendar = new GregorianCalendar(); calendar.set(Calendar.SECOND, 58); assertMatchesNextSecond(trigger, calendar); } @ParameterizedCronTriggerTest void testMatchSpecificSecond(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("10 * * * * *", timeZone); GregorianCalendar calendar = new GregorianCalendar(); calendar.set(Calendar.SECOND, 9); assertMatchesNextSecond(trigger, calendar); } @ParameterizedCronTriggerTest void testIncrementSecondByOne(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("11 * * * * *", timeZone); this.calendar.set(Calendar.SECOND, 10); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.SECOND, 1); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testIncrementSecondWithPreviousExecutionTooEarly(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("11 * * * * *", timeZone); 
this.calendar.set(Calendar.SECOND, 11); SimpleTriggerContext context = new SimpleTriggerContext(); context.update(this.calendar.getTime(), new Date(this.calendar.getTimeInMillis() - 100), new Date(this.calendar.getTimeInMillis() - 90)); this.calendar.add(Calendar.MINUTE, 1); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testIncrementSecondAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("10 * * * * *", timeZone); this.calendar.set(Calendar.SECOND, 11); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.SECOND, 59); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testSecondRange(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("10-15 * * * * *", timeZone); this.calendar.set(Calendar.SECOND, 9); assertMatchesNextSecond(trigger, this.calendar); this.calendar.set(Calendar.SECOND, 14); assertMatchesNextSecond(trigger, this.calendar); } @ParameterizedCronTriggerTest void testIncrementMinute(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 * * * * *", timeZone); this.calendar.set(Calendar.MINUTE, 10); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.MINUTE, 1); this.calendar.set(Calendar.SECOND, 0); TriggerContext context1 = getTriggerContext(localDate); localDate = trigger.nextExecutionTime(context1); assertThat(localDate).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.MINUTE, 1); TriggerContext context2 = getTriggerContext(localDate); localDate = trigger.nextExecutionTime(context2); assertThat(localDate).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testIncrementMinuteByOne(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 11 * * * *", timeZone); this.calendar.set(Calendar.MINUTE, 10); TriggerContext context = getTriggerContext(this.calendar.getTime()); this.calendar.add(Calendar.MINUTE, 1); this.calendar.set(Calendar.SECOND, 0); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testIncrementMinuteAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 10 * * * *", timeZone); this.calendar.set(Calendar.MINUTE, 11); this.calendar.set(Calendar.SECOND, 0); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.MINUTE, 59); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testIncrementHour(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 * * * *", timeZone); this.calendar.set(Calendar.MONTH, 9); this.calendar.set(Calendar.DAY_OF_MONTH, 30); this.calendar.set(Calendar.HOUR_OF_DAY, 11); this.calendar.set(Calendar.MINUTE, 1); this.calendar.set(Calendar.SECOND, 0); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.HOUR_OF_DAY, 12); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = 
trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.HOUR_OF_DAY, 13); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testIncrementHourAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 * * * *", timeZone); this.calendar.set(Calendar.MONTH, 9); this.calendar.set(Calendar.DAY_OF_MONTH, 10); this.calendar.set(Calendar.HOUR_OF_DAY, 23); this.calendar.set(Calendar.MINUTE, 1); this.calendar.set(Calendar.SECOND, 0); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.DAY_OF_MONTH, 11); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.HOUR_OF_DAY, 1); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testIncrementDayOfMonth(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 * * *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 1); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context1 = getTriggerContext(localDate); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); assertThat(this.calendar.get(Calendar.DAY_OF_MONTH)).isEqualTo(2); this.calendar.add(Calendar.DAY_OF_MONTH, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); assertThat(this.calendar.get(Calendar.DAY_OF_MONTH)).isEqualTo(3); } @ParameterizedCronTriggerTest void testIncrementDayOfMonthByOne(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * 10 * *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 9); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testIncrementDayOfMonthAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * 10 * *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 11); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.MONTH, 1); this.calendar.set(Calendar.DAY_OF_MONTH, 10); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void 
testDailyTriggerInShortMonth(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 * * *", timeZone); this.calendar.set(Calendar.MONTH, 8); // September: 30 days this.calendar.set(Calendar.DAY_OF_MONTH, 30); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.MONTH, 9); // October this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); this.calendar.set(Calendar.DAY_OF_MONTH, 1); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.DAY_OF_MONTH, 2); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testDailyTriggerInLongMonth(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 * * *", timeZone); this.calendar.set(Calendar.MONTH, 7); // August: 31 days and not a daylight saving boundary this.calendar.set(Calendar.DAY_OF_MONTH, 30); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); this.calendar.set(Calendar.DAY_OF_MONTH, 31); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.MONTH, 8); // September this.calendar.set(Calendar.DAY_OF_MONTH, 1); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testDailyTriggerOnDaylightSavingBoundary(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 * * *", timeZone); this.calendar.set(Calendar.MONTH, 9); // October: 31 days and a daylight saving boundary in CET this.calendar.set(Calendar.DAY_OF_MONTH, 30); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); this.calendar.set(Calendar.DAY_OF_MONTH, 31); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.MONTH, 10); // November this.calendar.set(Calendar.DAY_OF_MONTH, 1); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testIncrementMonth(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 1 * *", timeZone); this.calendar.set(Calendar.MONTH, 9); this.calendar.set(Calendar.DAY_OF_MONTH, 30); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); this.calendar.set(Calendar.MONTH, 10); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); 
assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.MONTH, 11); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testIncrementMonthAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 1 * *", timeZone); this.calendar.set(Calendar.MONTH, 11); this.calendar.set(Calendar.DAY_OF_MONTH, 31); this.calendar.set(Calendar.YEAR, 2010); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); this.calendar.set(Calendar.MONTH, 0); this.calendar.set(Calendar.YEAR, 2011); TriggerContext context1 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); this.calendar.set(Calendar.MONTH, 1); TriggerContext context2 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context2)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testMonthlyTriggerInLongMonth(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 31 * *", timeZone); this.calendar.set(Calendar.MONTH, 9); this.calendar.set(Calendar.DAY_OF_MONTH, 30); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.DAY_OF_MONTH, 31); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testMonthlyTriggerInShortMonth(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 1 * *", timeZone); this.calendar.set(Calendar.MONTH, 9); this.calendar.set(Calendar.DAY_OF_MONTH, 30); Date localDate = this.calendar.getTime(); this.calendar.set(Calendar.MONTH, 10); this.calendar.set(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testIncrementDayOfWeekByOne(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * * * 2", timeZone); this.calendar.set(Calendar.DAY_OF_WEEK, 2); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.DAY_OF_WEEK, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); assertThat(this.calendar.get(Calendar.DAY_OF_WEEK)).isEqualTo(Calendar.TUESDAY); } @ParameterizedCronTriggerTest void testIncrementDayOfWeekAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * * * 2", timeZone); this.calendar.set(Calendar.DAY_OF_WEEK, 4); Date localDate = this.calendar.getTime(); 
this.calendar.add(Calendar.DAY_OF_MONTH, 6); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); TriggerContext context = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context)).isEqualTo(this.calendar.getTime()); assertThat(this.calendar.get(Calendar.DAY_OF_WEEK)).isEqualTo(Calendar.TUESDAY); } @ParameterizedCronTriggerTest void testSpecificMinuteSecond(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("55 5 * * * *", timeZone); this.calendar.set(Calendar.MINUTE, 4); this.calendar.set(Calendar.SECOND, 54); Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); this.calendar.add(Calendar.MINUTE, 1); this.calendar.set(Calendar.SECOND, 55); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.HOUR, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testSpecificHourSecond(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("55 * 10 * * *", timeZone); this.calendar.set(Calendar.HOUR_OF_DAY, 9); this.calendar.set(Calendar.SECOND, 54); Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); this.calendar.add(Calendar.HOUR_OF_DAY, 1); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 55); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.MINUTE, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testSpecificMinuteHour(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* 5 10 * * *", timeZone); this.calendar.set(Calendar.MINUTE, 4); this.calendar.set(Calendar.HOUR_OF_DAY, 9); Date localDate = this.calendar.getTime(); this.calendar.add(Calendar.MINUTE, 1); this.calendar.add(Calendar.HOUR_OF_DAY, 1); this.calendar.set(Calendar.SECOND, 0); TriggerContext context1 = getTriggerContext(localDate); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); // next trigger is in one second because second is wildcard this.calendar.add(Calendar.SECOND, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testSpecificDayOfMonthSecond(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("55 * * 3 * *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 2); this.calendar.set(Calendar.SECOND, 54); Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); this.calendar.add(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 55); Object actual1 = localDate = 
trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.MINUTE, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testSpecificDate(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("* * * 3 11 *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 2); this.calendar.set(Calendar.MONTH, 9); Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); this.calendar.add(Calendar.DAY_OF_MONTH, 1); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MONTH, 10); // 10=November this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.SECOND, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testNonExistentSpecificDate(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); // TODO: maybe try and detect this as a special case in parser? CronTrigger trigger = new CronTrigger("0 0 0 31 6 *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 10); this.calendar.set(Calendar.MONTH, 2); Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); assertThat(trigger.nextExecutionTime(context1)).isNull(); } @ParameterizedCronTriggerTest void testLeapYearSpecificDate(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 0 29 2 *", timeZone); this.calendar.set(Calendar.YEAR, 2007); this.calendar.set(Calendar.DAY_OF_MONTH, 10); this.calendar.set(Calendar.MONTH, 1); // 2=February Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); this.calendar.set(Calendar.YEAR, 2008); this.calendar.set(Calendar.DAY_OF_MONTH, 29); this.calendar.set(Calendar.HOUR_OF_DAY, 0); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); Object actual1 = localDate = trigger.nextExecutionTime(context1); assertThat(actual1).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.YEAR, 4); TriggerContext context2 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context2); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testWeekDaySequence(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 0 7 ? 
* MON-FRI", timeZone); // This is a Saturday this.calendar.set(2009, 8, 26); Date localDate = this.calendar.getTime(); // 7 am is the trigger time this.calendar.set(Calendar.HOUR_OF_DAY, 7); this.calendar.set(Calendar.MINUTE, 0); this.calendar.set(Calendar.SECOND, 0); // Add two days because we start on Saturday this.calendar.add(Calendar.DAY_OF_MONTH, 2); TriggerContext context1 = getTriggerContext(localDate); Object actual2 = localDate = trigger.nextExecutionTime(context1); assertThat(actual2).isEqualTo(this.calendar.getTime()); // Next day is a week day so add one this.calendar.add(Calendar.DAY_OF_MONTH, 1); TriggerContext context2 = getTriggerContext(localDate); Object actual1 = localDate = trigger.nextExecutionTime(context2); assertThat(actual1).isEqualTo(this.calendar.getTime()); this.calendar.add(Calendar.DAY_OF_MONTH, 1); TriggerContext context3 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context3); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testDayOfWeekIndifferent(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * 2 * *", timeZone); CronTrigger trigger2 = new CronTrigger("* * * 2 * ?", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void testSecondIncrementer(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("57,59 * * * * *", timeZone); CronTrigger trigger2 = new CronTrigger("57/2 * * * * *", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void testSecondIncrementerWithRange(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("1,3,5 * * * * *", timeZone); CronTrigger trigger2 = new CronTrigger("1-6/2 * * * * *", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void testHourIncrementer(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * 4,8,12,16,20 * * *", timeZone); CronTrigger trigger2 = new CronTrigger("* * 4/4 * * *", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void testDayNames(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * * * 0-6", timeZone); CronTrigger trigger2 = new CronTrigger("* * * * * TUE,WED,THU,FRI,SAT,SUN,MON", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void testSundayIsZero(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * * * 0", timeZone); CronTrigger trigger2 = new CronTrigger("* * * * * SUN", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void testSundaySynonym(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * * * 0", timeZone); CronTrigger trigger2 = new CronTrigger("* * * * * 7", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void testMonthNames(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * * 1-12 *", timeZone); CronTrigger trigger2 = new CronTrigger("* * * * 
FEB,JAN,MAR,APR,MAY,JUN,JUL,AUG,SEP,OCT,NOV,DEC *", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void testMonthNamesMixedCase(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * * 2 *", timeZone); CronTrigger trigger2 = new CronTrigger("* * * * Feb *", timeZone); assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void testSecondInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("77 * * * * *", timeZone)); } @ParameterizedCronTriggerTest void testSecondRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("44-77 * * * * *", timeZone)); } @ParameterizedCronTriggerTest void testMinuteInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* 77 * * * *", timeZone)); } @ParameterizedCronTriggerTest void testMinuteRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* 44-77 * * * *", timeZone)); } @ParameterizedCronTriggerTest void testHourInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * 27 * * *", timeZone)); } @ParameterizedCronTriggerTest void testHourRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * 23-28 * * *", timeZone)); } @ParameterizedCronTriggerTest void testDayInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * * 45 * *", timeZone)); } @ParameterizedCronTriggerTest void testDayRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * * 28-45 * *", timeZone)); } @ParameterizedCronTriggerTest void testMonthInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("0 0 0 25 13 ?", timeZone)); } @ParameterizedCronTriggerTest void testMonthInvalidTooSmall(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("0 0 0 25 0 ?", timeZone)); } @ParameterizedCronTriggerTest void testDayOfMonthInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("0 0 0 32 12 ?", timeZone)); } @ParameterizedCronTriggerTest void testMonthRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * * * 11-13 *", timeZone)); } @ParameterizedCronTriggerTest void testWhitespace(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger1 = new CronTrigger("* * * * 1 *", timeZone); CronTrigger trigger2 = new CronTrigger("* * * * 1 *", timeZone); 
assertThat(trigger2).isEqualTo(trigger1); } @ParameterizedCronTriggerTest void testMonthSequence(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); CronTrigger trigger = new CronTrigger("0 30 23 30 1/3 ?", timeZone); this.calendar.set(2010, 11, 30); Date localDate = this.calendar.getTime(); // set expected next trigger time this.calendar.set(Calendar.HOUR_OF_DAY, 23); this.calendar.set(Calendar.MINUTE, 30); this.calendar.set(Calendar.SECOND, 0); this.calendar.add(Calendar.MONTH, 1); TriggerContext context1 = getTriggerContext(localDate); Object actual2 = localDate = trigger.nextExecutionTime(context1); assertThat(actual2).isEqualTo(this.calendar.getTime()); // Next trigger is 3 months latter this.calendar.add(Calendar.MONTH, 3); TriggerContext context2 = getTriggerContext(localDate); Object actual1 = localDate = trigger.nextExecutionTime(context2); assertThat(actual1).isEqualTo(this.calendar.getTime()); // Next trigger is 3 months latter this.calendar.add(Calendar.MONTH, 3); TriggerContext context3 = getTriggerContext(localDate); Object actual = localDate = trigger.nextExecutionTime(context3); assertThat(actual).isEqualTo(this.calendar.getTime()); } @ParameterizedCronTriggerTest void testDaylightSavingMissingHour(LocalDateTime localDateTime, TimeZone timeZone) { setUp(localDateTime, timeZone); // This trigger has to be somewhere in between 2am and 3am CronTrigger trigger = new CronTrigger("0 10 2 * * *", timeZone); this.calendar.set(Calendar.DAY_OF_MONTH, 31); this.calendar.set(Calendar.MONTH, Calendar.MARCH); this.calendar.set(Calendar.YEAR, 2013); this.calendar.set(Calendar.HOUR_OF_DAY, 1); this.calendar.set(Calendar.SECOND, 54); Date localDate = this.calendar.getTime(); TriggerContext context1 = getTriggerContext(localDate); if (timeZone.equals(TimeZone.getTimeZone("CET"))) { // Clocks go forward an hour so 2am doesn't exist in CET for this localDateTime this.calendar.add(Calendar.DAY_OF_MONTH, 1); } this.calendar.add(Calendar.HOUR_OF_DAY, 1); this.calendar.set(Calendar.MINUTE, 10); this.calendar.set(Calendar.SECOND, 0); Object actual = localDate = trigger.nextExecutionTime(context1); assertThat(actual).isEqualTo(this.calendar.getTime()); } private static void roundup(Calendar calendar) { calendar.add(Calendar.SECOND, 1); calendar.set(Calendar.MILLISECOND, 0); } private static void assertMatchesNextSecond(CronTrigger trigger, Calendar calendar) { Date localDateTime = calendar.getTime(); roundup(calendar); TriggerContext context = getTriggerContext(localDateTime); assertThat(trigger.nextExecutionTime(context)).isEqualTo(calendar.getTime()); } private static TriggerContext getTriggerContext(Date lastCompletionTime) { SimpleTriggerContext context = new SimpleTriggerContext(); context.update(null, null, lastCompletionTime); return context; } @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.METHOD) @ParameterizedTest(name = "[{index}] localDateTime[{0}], time zone[{1}]") @MethodSource("parameters") @interface ParameterizedCronTriggerTest { } static Stream<Arguments> parameters() { return Stream.of( arguments(LocalDateTime.now(), TimeZone.getTimeZone("PST")), arguments(LocalDateTime.now(), TimeZone.getTimeZone("CET")) ); } }
Polish CronTriggerTests and assess daylight savings time issue This commit polishes CronTriggerTests and modifies the daylightSavingMissingHour() test to help analyze why the test fails for CET if the minute value of the last completion time falls between 0 and 9. Associated broken build: https://ge.spring.io/s/epphj7vruwcn6/tests/:spring-context:test/org.springframework.scheduling.support.CronTriggerTests/testDaylightSavingMissingHour(LocalDateTime,%20TimeZone)%5B2%5D?expanded-stacktrace=WyIwIl0
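For context on the failure the message describes, the sketch below is a standalone illustration (not part of the test class or the Spring codebase) of the CET daylight-saving gap involved: on March 31, 2013 the clocks in CET jump from 02:00 to 03:00, so the trigger's 02:10 target wall time does not exist on that day. It uses only plain java.util classes; the class name CetDstGapDemo is invented for the example.

import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.TimeZone;

public class CetDstGapDemo {

    public static void main(String[] args) {
        TimeZone cet = TimeZone.getTimeZone("CET");

        // CET switched to CEST at 02:00 on March 31, 2013, so wall-clock times
        // between 02:00:00 and 02:59:59 do not exist on that day.
        Calendar calendar = new GregorianCalendar(cet);
        calendar.clear();
        calendar.set(2013, Calendar.MARCH, 31, 2, 10, 0); // the trigger's target wall time

        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss zzz");
        format.setTimeZone(cet);

        // A lenient GregorianCalendar (the default) resolves the non-existent
        // wall time by pushing it across the gap, so this prints
        // 2013-03-31 03:10:00 CEST rather than 02:10.
        System.out.println(format.format(calendar.getTime()));
    }
}

Because 02:10 cannot occur on the switch day in CET, the next valid execution for the "0 10 2 * * *" expression is 02:10 the following day, which is what the modified test asserts for the CET parameterization.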
spring-context/src/test/java/org/springframework/scheduling/support/CronTriggerTests.java
Polish CronTriggerTests and assess daylight savings time issue
<ide><path>pring-context/src/test/java/org/springframework/scheduling/support/CronTriggerTests.java <ide> /* <del> * Copyright 2002-2019 the original author or authors. <add> * Copyright 2002-2021 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> <ide> <ide> @ParameterizedCronTriggerTest <del> void testMatchAll(LocalDateTime localDateTime, TimeZone timeZone) { <add> void matchAll(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("* * * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testMatchLastSecond(LocalDateTime localDateTime, TimeZone timeZone) { <add> void matchLastSecond(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("* * * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testMatchSpecificSecond(LocalDateTime localDateTime, TimeZone timeZone) { <add> void matchSpecificSecond(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("10 * * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementSecondByOne(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementSecondByOne(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("11 * * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementSecondWithPreviousExecutionTooEarly(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementSecondWithPreviousExecutionTooEarly(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("11 * * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementSecondAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementSecondAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("10 * * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testSecondRange(LocalDateTime localDateTime, TimeZone timeZone) { <add> void secondRange(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("10-15 * * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementMinute(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementMinute(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 * * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementMinuteByOne(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementMinuteByOne(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 11 * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementMinuteAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { <add> void 
incrementMinuteAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 10 * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementHour(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementHour(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 0 * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementHourAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementHourAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 0 * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementDayOfMonth(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementDayOfMonth(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 0 0 * * *", timeZone); <ide> assertThat(this.calendar.get(Calendar.DAY_OF_MONTH)).isEqualTo(2); <ide> this.calendar.add(Calendar.DAY_OF_MONTH, 1); <ide> TriggerContext context2 = getTriggerContext(localDate); <del> Object actual = localDate = trigger.nextExecutionTime(context2); <add> Object actual = trigger.nextExecutionTime(context2); <ide> assertThat(actual).isEqualTo(this.calendar.getTime()); <ide> assertThat(this.calendar.get(Calendar.DAY_OF_MONTH)).isEqualTo(3); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementDayOfMonthByOne(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementDayOfMonthByOne(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("* * * 10 * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementDayOfMonthAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementDayOfMonthAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("* * * 10 * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testDailyTriggerInShortMonth(LocalDateTime localDateTime, TimeZone timeZone) { <add> void dailyTriggerInShortMonth(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 0 0 * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testDailyTriggerInLongMonth(LocalDateTime localDateTime, TimeZone timeZone) { <add> void dailyTriggerInLongMonth(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 0 0 * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testDailyTriggerOnDaylightSavingBoundary(LocalDateTime localDateTime, TimeZone timeZone) { <add> void dailyTriggerOnDaylightSavingBoundary(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 0 0 * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementMonth(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementMonth(LocalDateTime localDateTime, 
TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 0 0 1 * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementMonthAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementMonthAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 0 0 1 * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testMonthlyTriggerInLongMonth(LocalDateTime localDateTime, TimeZone timeZone) { <add> void monthlyTriggerInLongMonth(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 0 0 31 * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testMonthlyTriggerInShortMonth(LocalDateTime localDateTime, TimeZone timeZone) { <add> void monthlyTriggerInShortMonth(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 0 0 1 * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementDayOfWeekByOne(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementDayOfWeekByOne(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("* * * * * 2", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testIncrementDayOfWeekAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { <add> void incrementDayOfWeekAndRollover(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("* * * * * 2", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testSpecificMinuteSecond(LocalDateTime localDateTime, TimeZone timeZone) { <add> void specificMinuteSecond(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("55 5 * * * *", timeZone); <ide> assertThat(actual1).isEqualTo(this.calendar.getTime()); <ide> this.calendar.add(Calendar.HOUR, 1); <ide> TriggerContext context2 = getTriggerContext(localDate); <del> Object actual = localDate = trigger.nextExecutionTime(context2); <del> assertThat(actual).isEqualTo(this.calendar.getTime()); <del> } <del> <del> @ParameterizedCronTriggerTest <del> void testSpecificHourSecond(LocalDateTime localDateTime, TimeZone timeZone) { <add> Object actual = trigger.nextExecutionTime(context2); <add> assertThat(actual).isEqualTo(this.calendar.getTime()); <add> } <add> <add> @ParameterizedCronTriggerTest <add> void specificHourSecond(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("55 * 10 * * *", timeZone); <ide> assertThat(actual1).isEqualTo(this.calendar.getTime()); <ide> this.calendar.add(Calendar.MINUTE, 1); <ide> TriggerContext context2 = getTriggerContext(localDate); <del> Object actual = localDate = trigger.nextExecutionTime(context2); <del> assertThat(actual).isEqualTo(this.calendar.getTime()); <del> } <del> <del> @ParameterizedCronTriggerTest <del> void testSpecificMinuteHour(LocalDateTime localDateTime, TimeZone timeZone) { <add> Object actual = trigger.nextExecutionTime(context2); <add> assertThat(actual).isEqualTo(this.calendar.getTime()); <add> } <add> 
<add> @ParameterizedCronTriggerTest <add> void specificMinuteHour(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("* 5 10 * * *", timeZone); <ide> // next trigger is in one second because second is wildcard <ide> this.calendar.add(Calendar.SECOND, 1); <ide> TriggerContext context2 = getTriggerContext(localDate); <del> Object actual = localDate = trigger.nextExecutionTime(context2); <del> assertThat(actual).isEqualTo(this.calendar.getTime()); <del> } <del> <del> @ParameterizedCronTriggerTest <del> void testSpecificDayOfMonthSecond(LocalDateTime localDateTime, TimeZone timeZone) { <add> Object actual = trigger.nextExecutionTime(context2); <add> assertThat(actual).isEqualTo(this.calendar.getTime()); <add> } <add> <add> @ParameterizedCronTriggerTest <add> void specificDayOfMonthSecond(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("55 * * 3 * *", timeZone); <ide> assertThat(actual1).isEqualTo(this.calendar.getTime()); <ide> this.calendar.add(Calendar.MINUTE, 1); <ide> TriggerContext context2 = getTriggerContext(localDate); <del> Object actual = localDate = trigger.nextExecutionTime(context2); <del> assertThat(actual).isEqualTo(this.calendar.getTime()); <del> } <del> <del> @ParameterizedCronTriggerTest <del> void testSpecificDate(LocalDateTime localDateTime, TimeZone timeZone) { <add> Object actual = trigger.nextExecutionTime(context2); <add> assertThat(actual).isEqualTo(this.calendar.getTime()); <add> } <add> <add> @ParameterizedCronTriggerTest <add> void specificDate(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("* * * 3 11 *", timeZone); <ide> assertThat(actual1).isEqualTo(this.calendar.getTime()); <ide> this.calendar.add(Calendar.SECOND, 1); <ide> TriggerContext context2 = getTriggerContext(localDate); <del> Object actual = localDate = trigger.nextExecutionTime(context2); <del> assertThat(actual).isEqualTo(this.calendar.getTime()); <del> } <del> <del> @ParameterizedCronTriggerTest <del> void testNonExistentSpecificDate(LocalDateTime localDateTime, TimeZone timeZone) { <add> Object actual = trigger.nextExecutionTime(context2); <add> assertThat(actual).isEqualTo(this.calendar.getTime()); <add> } <add> <add> @ParameterizedCronTriggerTest <add> void nonExistentSpecificDate(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> // TODO: maybe try and detect this as a special case in parser? 
<ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testLeapYearSpecificDate(LocalDateTime localDateTime, TimeZone timeZone) { <add> void leapYearSpecificDate(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 0 0 29 2 *", timeZone); <ide> assertThat(actual1).isEqualTo(this.calendar.getTime()); <ide> this.calendar.add(Calendar.YEAR, 4); <ide> TriggerContext context2 = getTriggerContext(localDate); <del> Object actual = localDate = trigger.nextExecutionTime(context2); <del> assertThat(actual).isEqualTo(this.calendar.getTime()); <del> } <del> <del> @ParameterizedCronTriggerTest <del> void testWeekDaySequence(LocalDateTime localDateTime, TimeZone timeZone) { <add> Object actual = trigger.nextExecutionTime(context2); <add> assertThat(actual).isEqualTo(this.calendar.getTime()); <add> } <add> <add> @ParameterizedCronTriggerTest <add> void weekDaySequence(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 0 7 ? * MON-FRI", timeZone); <ide> assertThat(actual1).isEqualTo(this.calendar.getTime()); <ide> this.calendar.add(Calendar.DAY_OF_MONTH, 1); <ide> TriggerContext context3 = getTriggerContext(localDate); <del> Object actual = localDate = trigger.nextExecutionTime(context3); <del> assertThat(actual).isEqualTo(this.calendar.getTime()); <del> } <del> <del> @ParameterizedCronTriggerTest <del> void testDayOfWeekIndifferent(LocalDateTime localDateTime, TimeZone timeZone) { <add> Object actual = trigger.nextExecutionTime(context3); <add> assertThat(actual).isEqualTo(this.calendar.getTime()); <add> } <add> <add> @ParameterizedCronTriggerTest <add> void dayOfWeekIndifferent(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger1 = new CronTrigger("* * * 2 * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testSecondIncrementer(LocalDateTime localDateTime, TimeZone timeZone) { <add> void secondIncrementer(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger1 = new CronTrigger("57,59 * * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testSecondIncrementerWithRange(LocalDateTime localDateTime, TimeZone timeZone) { <add> void secondIncrementerWithRange(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger1 = new CronTrigger("1,3,5 * * * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testHourIncrementer(LocalDateTime localDateTime, TimeZone timeZone) { <add> void hourIncrementer(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger1 = new CronTrigger("* * 4,8,12,16,20 * * *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testDayNames(LocalDateTime localDateTime, TimeZone timeZone) { <add> void dayNames(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger1 = new CronTrigger("* * * * * 0-6", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testSundayIsZero(LocalDateTime localDateTime, TimeZone timeZone) { <add> void sundayIsZero(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger1 = new 
CronTrigger("* * * * * 0", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testSundaySynonym(LocalDateTime localDateTime, TimeZone timeZone) { <add> void sundaySynonym(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger1 = new CronTrigger("* * * * * 0", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testMonthNames(LocalDateTime localDateTime, TimeZone timeZone) { <add> void monthNames(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger1 = new CronTrigger("* * * * 1-12 *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testMonthNamesMixedCase(LocalDateTime localDateTime, TimeZone timeZone) { <add> void monthNamesMixedCase(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger1 = new CronTrigger("* * * * 2 *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testSecondInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <add> void secondInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("77 * * * * *", timeZone)); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testSecondRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <add> void secondRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("44-77 * * * * *", timeZone)); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testMinuteInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <add> void minuteInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* 77 * * * *", timeZone)); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testMinuteRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <add> void minuteRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* 44-77 * * * *", timeZone)); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testHourInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <add> void hourInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * 27 * * *", timeZone)); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testHourRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <add> void hourRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * 23-28 * * *", timeZone)); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testDayInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <add> void dayInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * * 45 * *", timeZone)); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void 
testDayRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <add> void dayRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * * 28-45 * *", timeZone)); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testMonthInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <add> void monthInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("0 0 0 25 13 ?", timeZone)); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testMonthInvalidTooSmall(LocalDateTime localDateTime, TimeZone timeZone) { <add> void monthInvalidTooSmall(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("0 0 0 25 0 ?", timeZone)); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testDayOfMonthInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <add> void dayOfMonthInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("0 0 0 32 12 ?", timeZone)); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testMonthRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <add> void monthRangeInvalid(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> assertThatIllegalArgumentException().isThrownBy(() -> new CronTrigger("* * * * 11-13 *", timeZone)); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testWhitespace(LocalDateTime localDateTime, TimeZone timeZone) { <add> void whitespace(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger1 = new CronTrigger("* * * * 1 *", timeZone); <ide> } <ide> <ide> @ParameterizedCronTriggerTest <del> void testMonthSequence(LocalDateTime localDateTime, TimeZone timeZone) { <add> void monthSequence(LocalDateTime localDateTime, TimeZone timeZone) { <ide> setUp(localDateTime, timeZone); <ide> <ide> CronTrigger trigger = new CronTrigger("0 30 23 30 1/3 ?", timeZone); <ide> // Next trigger is 3 months latter <ide> this.calendar.add(Calendar.MONTH, 3); <ide> TriggerContext context3 = getTriggerContext(localDate); <del> Object actual = localDate = trigger.nextExecutionTime(context3); <del> assertThat(actual).isEqualTo(this.calendar.getTime()); <del> } <del> <del> @ParameterizedCronTriggerTest <del> void testDaylightSavingMissingHour(LocalDateTime localDateTime, TimeZone timeZone) { <del> setUp(localDateTime, timeZone); <del> <del> // This trigger has to be somewhere in between 2am and 3am <add> Object actual = trigger.nextExecutionTime(context3); <add> assertThat(actual).isEqualTo(this.calendar.getTime()); <add> } <add> <add> @ParameterizedCronTriggerTest <add> void daylightSavingMissingHour(LocalDateTime localDateTime, TimeZone timeZone) { <add> setUp(localDateTime, timeZone); <add> <add> // This trigger has to be somewhere between 2:00 AM and 3:00 AM, so we <add> // use a cron expression for 2:10 AM every day. <ide> CronTrigger trigger = new CronTrigger("0 10 2 * * *", timeZone); <add> <add> // 2:00 AM on March 31, 2013: start of Daylight Saving Time for CET in 2013. 
<add> // Setting up last completion: <add> // - PST: Sun Mar 31 10:10:54 CEST 2013 <add> // - CET: Sun Mar 31 01:10:54 CET 2013 <ide> this.calendar.set(Calendar.DAY_OF_MONTH, 31); <ide> this.calendar.set(Calendar.MONTH, Calendar.MARCH); <ide> this.calendar.set(Calendar.YEAR, 2013); <ide> this.calendar.set(Calendar.HOUR_OF_DAY, 1); <add> this.calendar.set(Calendar.MINUTE, 10); // changing to any minute from 0-9 causes the test to fail for CET. <ide> this.calendar.set(Calendar.SECOND, 54); <del> Date localDate = this.calendar.getTime(); <del> TriggerContext context1 = getTriggerContext(localDate); <add> Date lastCompletionTime = this.calendar.getTime(); <add> <add> // Setting up expected next execution time: <add> // - PST: Sun Mar 31 11:10:00 CEST 2013 <add> // - CET: Mon Apr 01 02:10:00 CEST 2013 <ide> if (timeZone.equals(TimeZone.getTimeZone("CET"))) { <ide> // Clocks go forward an hour so 2am doesn't exist in CET for this localDateTime <ide> this.calendar.add(Calendar.DAY_OF_MONTH, 1); <ide> this.calendar.add(Calendar.HOUR_OF_DAY, 1); <ide> this.calendar.set(Calendar.MINUTE, 10); <ide> this.calendar.set(Calendar.SECOND, 0); <del> Object actual = localDate = trigger.nextExecutionTime(context1); <del> assertThat(actual).isEqualTo(this.calendar.getTime()); <add> <add> TriggerContext context = getTriggerContext(lastCompletionTime); <add> Object nextExecutionTime = trigger.nextExecutionTime(context); <add> assertThat(nextExecutionTime).isEqualTo(this.calendar.getTime()); <ide> } <ide> <ide> private static void roundup(Calendar calendar) {
Java
apache-2.0
ffc45e50fb681da39786218ad4dd780a0e4fc30e
0
nroduit/XChart,timmolter/XChart
/** * Copyright 2015-2016 Knowm Inc. (http://knowm.org) and contributors. * Copyright 2011-2015 Xeiam LLC (http://xeiam.com) and contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.knowm.xchart; import java.awt.Dimension; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.KeyEvent; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.swing.AbstractAction; import javax.swing.JFileChooser; import javax.swing.JMenuItem; import javax.swing.JPanel; import javax.swing.JPopupMenu; import javax.swing.KeyStroke; import javax.swing.filechooser.FileFilter; import org.knowm.xchart.BitmapEncoder.BitmapFormat; import org.knowm.xchart.VectorGraphicsEncoder.VectorGraphicsFormat; import org.knowm.xchart.internal.Series; import org.knowm.xchart.internal.Series_AxesChart; import org.knowm.xchart.internal.chartpart.Chart; /** * A Swing JPanel that contains a Chart * <p> * Right-click + Save As... or ctrl+S pops up a Save As dialog box for saving the chart as a JPeg or PNG file. * * @author timmolter */ public class XChartPanel<T extends Chart> extends JPanel { private final T chart; private final Dimension preferredSize; private String saveAsString = "Save As..."; /** * Constructor * * @param chart */ public XChartPanel(final T chart) { this.chart = chart; preferredSize = new Dimension(chart.getWidth(), chart.getHeight()); // Right-click listener for saving chart this.addMouseListener(new PopUpMenuClickListener()); // Control+S key listener for saving chart KeyStroke ctrlS = KeyStroke.getKeyStroke(KeyEvent.VK_S, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()); this.getInputMap(WHEN_IN_FOCUSED_WINDOW).put(ctrlS, "save"); this.getActionMap().put("save", new SaveAction()); } /** * Set the "Save As..." String if you want to localize it. 
* * @param saveAsString */ public void setSaveAsString(String saveAsString) { this.saveAsString = saveAsString; } @Override protected void paintComponent(Graphics g) { super.paintComponent(g); Graphics2D g2d = (Graphics2D) g.create(); chart.paint(g2d, getWidth(), getHeight()); g2d.dispose(); } public T getChart() { return this.chart; } @Override public Dimension getPreferredSize() { return this.preferredSize; } private class SaveAction extends AbstractAction { public SaveAction() { super("save"); } @Override public void actionPerformed(ActionEvent e) { showSaveAsDialog(); } } private void showSaveAsDialog() { JFileChooser fileChooser = new JFileChooser(); fileChooser.addChoosableFileFilter(new SuffixSaveFilter("jpg")); FileFilter pngFileFilter = new SuffixSaveFilter("png"); fileChooser.addChoosableFileFilter(pngFileFilter); fileChooser.addChoosableFileFilter(new SuffixSaveFilter("bmp")); fileChooser.addChoosableFileFilter(new SuffixSaveFilter("gif")); // VectorGraphics2D is optional, so if it's on the classpath, allow saving charts as vector graphic try { Class.forName("de.erichseifert.vectorgraphics2d.VectorGraphics2D"); // it exists on the classpath fileChooser.addChoosableFileFilter(new SuffixSaveFilter("svg")); fileChooser.addChoosableFileFilter(new SuffixSaveFilter("eps")); fileChooser.addChoosableFileFilter(new SuffixSaveFilter("pdf")); } catch (ClassNotFoundException e) { // it does not exist on the classpath } fileChooser.setAcceptAllFileFilterUsed(false); fileChooser.setFileFilter(pngFileFilter); if (fileChooser.showSaveDialog(null) == JFileChooser.APPROVE_OPTION) { if (fileChooser.getSelectedFile() != null) { File theFileToSave = fileChooser.getSelectedFile(); try { if (fileChooser.getFileFilter() == null) { BitmapEncoder.saveBitmap(chart, theFileToSave.getCanonicalPath().toString(), BitmapFormat.PNG); } else if (fileChooser.getFileFilter().getDescription().equals("*.jpg,*.JPG")) { BitmapEncoder.saveJPGWithQuality(chart, BitmapEncoder.addFileExtension(theFileToSave.getCanonicalPath().toString(), BitmapFormat.JPG), 1.0f); } else if (fileChooser.getFileFilter().getDescription().equals("*.png,*.PNG")) { BitmapEncoder.saveBitmap(chart, theFileToSave.getCanonicalPath().toString(), BitmapFormat.PNG); } else if (fileChooser.getFileFilter().getDescription().equals("*.bmp,*.BMP")) { BitmapEncoder.saveBitmap(chart, theFileToSave.getCanonicalPath().toString(), BitmapFormat.BMP); } else if (fileChooser.getFileFilter().getDescription().equals("*.gif,*.GIF")) { BitmapEncoder.saveBitmap(chart, theFileToSave.getCanonicalPath().toString(), BitmapFormat.GIF); } else if (fileChooser.getFileFilter().getDescription().equals("*.svg,*.SVG")) { VectorGraphicsEncoder.saveVectorGraphic(chart, theFileToSave.getCanonicalPath().toString(), VectorGraphicsFormat.SVG); } else if (fileChooser.getFileFilter().getDescription().equals("*.eps,*.EPS")) { VectorGraphicsEncoder.saveVectorGraphic(chart, theFileToSave.getCanonicalPath().toString(), VectorGraphicsFormat.EPS); } else if (fileChooser.getFileFilter().getDescription().equals("*.pdf,*.PDF")) { VectorGraphicsEncoder.saveVectorGraphic(chart, theFileToSave.getCanonicalPath().toString(), VectorGraphicsFormat.PDF); } } catch (IOException e) { e.printStackTrace(); } } } } /** * File filter based on the suffix of a file. This file filter accepts all files that end with .suffix or the capitalized suffix. 
* * @author Benedikt Bünz */ private class SuffixSaveFilter extends FileFilter { private final String suffix; /** * @param suffix This file filter accepts all files that end with .suffix or the capitalized suffix. */ public SuffixSaveFilter(String suffix) { this.suffix = suffix; } @Override public boolean accept(File f) { if (f.isDirectory()) { return true; } String s = f.getName(); return s.endsWith("." + suffix) || s.endsWith("." + suffix.toUpperCase()); } @Override public String getDescription() { return "*." + suffix + ",*." + suffix.toUpperCase(); } } private class PopUpMenuClickListener extends MouseAdapter { @Override public void mousePressed(MouseEvent e) { if (e.isPopupTrigger()) { doPop(e); } } @Override public void mouseReleased(MouseEvent e) { if (e.isPopupTrigger()) { doPop(e); } } private void doPop(MouseEvent e) { XChartPanelPopupMenu menu = new XChartPanelPopupMenu(); menu.show(e.getComponent(), e.getX(), e.getY()); } } private class XChartPanelPopupMenu extends JPopupMenu { JMenuItem saveAsMenuItem; public XChartPanelPopupMenu() { saveAsMenuItem = new JMenuItem(saveAsString); saveAsMenuItem.addMouseListener(new MouseListener() { @Override public void mouseReleased(MouseEvent e) { showSaveAsDialog(); } @Override public void mousePressed(MouseEvent e) { } @Override public void mouseExited(MouseEvent e) { } @Override public void mouseEntered(MouseEvent e) { } @Override public void mouseClicked(MouseEvent e) { } }); add(saveAsMenuItem); } } /** * Update a series by updating the X-Axis, Y-Axis and error bar data * * @param seriesName * @param newXData - set null to be automatically generated as a list of increasing Integers starting from * 1 and ending at the size of the new Y-Axis data list. * @param newYData * @param newErrorBarData - set null if there are no error bars * @return */ public Series updateSeries(String seriesName, List<?> newXData, List<? extends Number> newYData, List<? extends Number> newErrorBarData) { Map<String, Series_AxesChart> seriesMap = chart.getSeriesMap(); Series_AxesChart series = seriesMap.get(seriesName); if (series == null) { throw new IllegalArgumentException("Series name >" + seriesName + "< not found!!!"); } if (newXData == null) { // generate X-Data List<Integer> generatedXData = new ArrayList<Integer>(); for (int i = 1; i <= newYData.size(); i++) { generatedXData.add(i); } series.replaceData(generatedXData, newYData, newErrorBarData); } else { series.replaceData(newXData, newYData, newErrorBarData); } // Re-display the chart revalidate(); repaint(); return series; } }
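For orientation, here is a minimal usage sketch of the generified panel and its updateSeries(...) API. It is not taken from the repository: the XYChart and QuickChart class names are assumed from the XChart 3.x API and may differ at this exact commit; only the XChartPanel constructor and the updateSeries(...) signature come from the file above.

import java.util.Arrays;
import javax.swing.JFrame;
import javax.swing.SwingUtilities;
import org.knowm.xchart.QuickChart; // assumed XChart 3.x helper
import org.knowm.xchart.XYChart;    // assumed XChart 3.x chart type

public class XChartPanelDemo {

    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            // Assumed XChart 3.x API: QuickChart builds a simple XYChart with one series.
            XYChart chart = QuickChart.getChart("Demo", "X", "Y", "y(x)",
                    new double[] {1, 2, 3}, new double[] {2, 4, 6});

            // The panel is now generic in the chart type.
            XChartPanel<XYChart> chartPanel = new XChartPanel<>(chart);

            JFrame frame = new JFrame("XChartPanel demo");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.add(chartPanel);
            frame.pack();
            frame.setVisible(true);

            // Later (e.g. when new data arrives), replace the series data in place;
            // passing null X data auto-generates 1..N, and null error bars means none.
            chartPanel.updateSeries("y(x)", null, Arrays.asList(3, 6, 9, 12), null);
        });
    }
}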
xchart/src/main/java/org/knowm/xchart/XChartPanel.java
/** * Copyright 2015-2016 Knowm Inc. (http://knowm.org) and contributors. * Copyright 2011-2015 Xeiam LLC (http://xeiam.com) and contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.knowm.xchart; import java.awt.Dimension; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.KeyEvent; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.swing.AbstractAction; import javax.swing.JFileChooser; import javax.swing.JMenuItem; import javax.swing.JPanel; import javax.swing.JPopupMenu; import javax.swing.KeyStroke; import javax.swing.filechooser.FileFilter; import org.knowm.xchart.BitmapEncoder.BitmapFormat; import org.knowm.xchart.VectorGraphicsEncoder.VectorGraphicsFormat; import org.knowm.xchart.internal.Series; import org.knowm.xchart.internal.Series_AxesChart; import org.knowm.xchart.internal.chartpart.Chart; /** * A Swing JPanel that contains a Chart * <p> * Right-click + Save As... or ctrl+S pops up a Save As dialog box for saving the chart as a JPeg or PNG file. * * @author timmolter */ public class XChartPanel extends JPanel { private final Chart chart; private final Dimension preferredSize; private String saveAsString = "Save As..."; /** * Constructor * * @param chart */ public XChartPanel(final Chart chart) { this.chart = chart; preferredSize = new Dimension(chart.getWidth(), chart.getHeight()); // Right-click listener for saving chart this.addMouseListener(new PopUpMenuClickListener()); // Control+S key listener for saving chart KeyStroke ctrlS = KeyStroke.getKeyStroke(KeyEvent.VK_S, Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()); this.getInputMap(WHEN_IN_FOCUSED_WINDOW).put(ctrlS, "save"); this.getActionMap().put("save", new SaveAction()); } /** * Set the "Save As..." String if you want to localize it. 
* * @param saveAsString */ public void setSaveAsString(String saveAsString) { this.saveAsString = saveAsString; } @Override protected void paintComponent(Graphics g) { super.paintComponent(g); Graphics2D g2d = (Graphics2D) g.create(); chart.paint(g2d, getWidth(), getHeight()); g2d.dispose(); } public Chart getChart() { return this.chart; } @Override public Dimension getPreferredSize() { return this.preferredSize; } private class SaveAction extends AbstractAction { public SaveAction() { super("save"); } @Override public void actionPerformed(ActionEvent e) { showSaveAsDialog(); } } private void showSaveAsDialog() { JFileChooser fileChooser = new JFileChooser(); fileChooser.addChoosableFileFilter(new SuffixSaveFilter("jpg")); FileFilter pngFileFilter = new SuffixSaveFilter("png"); fileChooser.addChoosableFileFilter(pngFileFilter); fileChooser.addChoosableFileFilter(new SuffixSaveFilter("bmp")); fileChooser.addChoosableFileFilter(new SuffixSaveFilter("gif")); // VectorGraphics2D is optional, so if it's on the classpath, allow saving charts as vector graphic try { Class.forName("de.erichseifert.vectorgraphics2d.VectorGraphics2D"); // it exists on the classpath fileChooser.addChoosableFileFilter(new SuffixSaveFilter("svg")); fileChooser.addChoosableFileFilter(new SuffixSaveFilter("eps")); fileChooser.addChoosableFileFilter(new SuffixSaveFilter("pdf")); } catch (ClassNotFoundException e) { // it does not exist on the classpath } fileChooser.setAcceptAllFileFilterUsed(false); fileChooser.setFileFilter(pngFileFilter); if (fileChooser.showSaveDialog(null) == JFileChooser.APPROVE_OPTION) { if (fileChooser.getSelectedFile() != null) { File theFileToSave = fileChooser.getSelectedFile(); try { if (fileChooser.getFileFilter() == null) { BitmapEncoder.saveBitmap(chart, theFileToSave.getCanonicalPath().toString(), BitmapFormat.PNG); } else if (fileChooser.getFileFilter().getDescription().equals("*.jpg,*.JPG")) { BitmapEncoder.saveJPGWithQuality(chart, BitmapEncoder.addFileExtension(theFileToSave.getCanonicalPath().toString(), BitmapFormat.JPG), 1.0f); } else if (fileChooser.getFileFilter().getDescription().equals("*.png,*.PNG")) { BitmapEncoder.saveBitmap(chart, theFileToSave.getCanonicalPath().toString(), BitmapFormat.PNG); } else if (fileChooser.getFileFilter().getDescription().equals("*.bmp,*.BMP")) { BitmapEncoder.saveBitmap(chart, theFileToSave.getCanonicalPath().toString(), BitmapFormat.BMP); } else if (fileChooser.getFileFilter().getDescription().equals("*.gif,*.GIF")) { BitmapEncoder.saveBitmap(chart, theFileToSave.getCanonicalPath().toString(), BitmapFormat.GIF); } else if (fileChooser.getFileFilter().getDescription().equals("*.svg,*.SVG")) { VectorGraphicsEncoder.saveVectorGraphic(chart, theFileToSave.getCanonicalPath().toString(), VectorGraphicsFormat.SVG); } else if (fileChooser.getFileFilter().getDescription().equals("*.eps,*.EPS")) { VectorGraphicsEncoder.saveVectorGraphic(chart, theFileToSave.getCanonicalPath().toString(), VectorGraphicsFormat.EPS); } else if (fileChooser.getFileFilter().getDescription().equals("*.pdf,*.PDF")) { VectorGraphicsEncoder.saveVectorGraphic(chart, theFileToSave.getCanonicalPath().toString(), VectorGraphicsFormat.PDF); } } catch (IOException e) { e.printStackTrace(); } } } } /** * File filter based on the suffix of a file. This file filter accepts all files that end with .suffix or the capitalized suffix. 
* * @author Benedikt Bünz */ private class SuffixSaveFilter extends FileFilter { private final String suffix; /** * @param suffix This file filter accepts all files that end with .suffix or the capitalized suffix. */ public SuffixSaveFilter(String suffix) { this.suffix = suffix; } @Override public boolean accept(File f) { if (f.isDirectory()) { return true; } String s = f.getName(); return s.endsWith("." + suffix) || s.endsWith("." + suffix.toUpperCase()); } @Override public String getDescription() { return "*." + suffix + ",*." + suffix.toUpperCase(); } } private class PopUpMenuClickListener extends MouseAdapter { @Override public void mousePressed(MouseEvent e) { if (e.isPopupTrigger()) { doPop(e); } } @Override public void mouseReleased(MouseEvent e) { if (e.isPopupTrigger()) { doPop(e); } } private void doPop(MouseEvent e) { XChartPanelPopupMenu menu = new XChartPanelPopupMenu(); menu.show(e.getComponent(), e.getX(), e.getY()); } } private class XChartPanelPopupMenu extends JPopupMenu { JMenuItem saveAsMenuItem; public XChartPanelPopupMenu() { saveAsMenuItem = new JMenuItem(saveAsString); saveAsMenuItem.addMouseListener(new MouseListener() { @Override public void mouseReleased(MouseEvent e) { showSaveAsDialog(); } @Override public void mousePressed(MouseEvent e) { } @Override public void mouseExited(MouseEvent e) { } @Override public void mouseEntered(MouseEvent e) { } @Override public void mouseClicked(MouseEvent e) { } }); add(saveAsMenuItem); } } /** * Update a series by updating the X-Axis, Y-Axis and error bar data * * @param seriesName * @param newXData - set null to be automatically generated as a list of increasing Integers starting from * 1 and ending at the size of the new Y-Axis data list. * @param newYData * @param newErrorBarData - set null if there are no error bars * @return */ public Series updateSeries(String seriesName, List<?> newXData, List<? extends Number> newYData, List<? extends Number> newErrorBarData) { Map<String, Series_AxesChart> seriesMap = chart.getSeriesMap(); Series_AxesChart series = seriesMap.get(seriesName); if (series == null) { throw new IllegalArgumentException("Series name >" + seriesName + "< not found!!!"); } if (newXData == null) { // generate X-Data List<Integer> generatedXData = new ArrayList<Integer>(); for (int i = 1; i <= newYData.size(); i++) { generatedXData.add(i); } series.replaceData(generatedXData, newYData, newErrorBarData); } else { series.replaceData(newXData, newYData, newErrorBarData); } // Re-display the chart revalidate(); repaint(); return series; } }
Add type parameter to XChartPanel
xchart/src/main/java/org/knowm/xchart/XChartPanel.java
Add type parameter to XChartPanel
<ide><path>xchart/src/main/java/org/knowm/xchart/XChartPanel.java <ide> * <ide> * @author timmolter <ide> */ <del>public class XChartPanel extends JPanel { <del> <del> private final Chart chart; <add>public class XChartPanel<T extends Chart> extends JPanel { <add> <add> private final T chart; <ide> private final Dimension preferredSize; <ide> private String saveAsString = "Save As..."; <ide> <ide> * <ide> * @param chart <ide> */ <del> public XChartPanel(final Chart chart) { <add> public XChartPanel(final T chart) { <ide> <ide> this.chart = chart; <ide> preferredSize = new Dimension(chart.getWidth(), chart.getHeight()); <ide> g2d.dispose(); <ide> } <ide> <del> public Chart getChart() { <add> public T getChart() { <ide> <ide> return this.chart; <ide> }
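To make the effect of this change concrete, a hedged before/after sketch from a caller's point of view (the XYChartBuilder calls are assumptions about the surrounding XChart API; only XChartPanel itself comes from this record):

import org.knowm.xchart.XChartPanel;
import org.knowm.xchart.XYChart;
import org.knowm.xchart.XYChartBuilder;

public class TypedPanelDemo {
  public static void main(String[] args) {
    XYChart chart = new XYChartBuilder().width(600).height(400).title("Demo").build();
    chart.addSeries("squares", new double[]{1, 2, 3}, new double[]{1, 4, 9});

    // Before this commit: XChartPanel panel = new XChartPanel(chart);
    //                     XYChart same = (XYChart) panel.getChart(); // explicit cast
    // After: the type parameter carries the concrete chart type through getChart().
    XChartPanel<XYChart> panel = new XChartPanel<>(chart);
    XYChart same = panel.getChart(); // no cast needed
    System.out.println(same.getWidth());
  }
}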
Java
apache-2.0
1eb17b739c1023655b6eee303f006a76f8dac4ae
0
beijing-penguin/myjdbc
package org.dc.jdbc.core.utils; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Types; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.dc.jdbc.core.CacheCenter; import org.dc.jdbc.core.entity.ClassRelation; import org.dc.jdbc.core.entity.ColumnBean; import org.dc.jdbc.core.entity.TableInfoBean; import org.dc.jdbc.exceptions.TooManyResultsException; /** * jdbc api封装成的工具类 * * @author DC * */ public class JDBCUtils { private static final Log LOG = LogFactory.getLog(JDBCUtils.class); public static void close(AutoCloseable... ac) throws Exception { for (int i = 0; i < ac.length; i++) { AutoCloseable autoClose = ac[i]; if(autoClose!=null){ try{ autoClose.close(); }catch (Exception e) { LOG.error("",e); } } } } /** * 编译sql并执行查询 * * @param ps * @param sql * @param params * @return 返回结果集对象 * @throws Exception */ public static ResultSet setParamsReturnRS(PreparedStatement ps, Object[] params) throws Exception { setParams(ps, params); return ps.executeQuery(); } /** * 执行sql语句,返回受影响的行数 * * @param conn * @param sql * @param params * @return * @throws Exception */ public static int preparedAndExcuteSQL(Connection conn, String sql, Object[] params) throws Exception { PreparedStatement ps = null; try { ps = conn.prepareStatement(sql); JDBCUtils.setParams(ps, params); return ps.executeUpdate(); } catch (Exception e) { throw e; } finally { close(ps); } } /** * 将sql查询结果转化成map类型的集合 * * @param rs * @param list * @throws Exception */ @SuppressWarnings("unchecked") private static <T> List<T> parseSqlResultToListMap(ResultSet rs) throws Exception { List<Object> list = new ArrayList<Object>(); ResultSetMetaData metaData = rs.getMetaData(); int cols_len = metaData.getColumnCount(); while (rs.next()) { list.add(getMap(rs, metaData, cols_len)); } if (list.size() == 0) { return null; } else { return (List<T>) list; } } /** * 将sql查询结果转化成对象 * * @param <T> * @param rs * @param cls * @param list * @throws Exception */ @SuppressWarnings("unchecked") private static <T> List<T> parseSqlResultToListObject(ResultSet rs, Class<? extends T> cls) throws Exception { List<Object> list = new ArrayList<Object>(); ResultSetMetaData metaData = rs.getMetaData(); int cols_len = metaData.getColumnCount(); while (rs.next()) { list.add(getBeanObject(rs, metaData, cls, cols_len)); } if (list.size() == 0) { return null; } else { return (List<T>) list; } } /** * 将sql查询结果转化成java基本数据类型 * * @param rs * @return * @throws Exception */ @SuppressWarnings("unchecked") private static <T> List<T> parseSqlResultToListBaseType(ResultSet rs) throws Exception { List<Object> list = new ArrayList<Object>(); ResultSetMetaData metaData = rs.getMetaData(); int cols_len = metaData.getColumnCount(); if (cols_len > 1) { throw new TooManyResultsException(); } while (rs.next()) { Object cols_value = getValueByObjectType(metaData, rs, 0); list.add(cols_value); } if (list.size() == 0) { return null; } else { return (List<T>) list; } } /** * 将sql查询结果封装成cls指定的泛型类型的集合并 * * @param rs * @param cls * @return * @throws Exception * 抛出程序可能出现一切异常 */ public static <T> List<T> parseSqlResultList(ResultSet rs, Class<? 
extends T> cls) throws Exception { if (cls == null || Map.class.isAssignableFrom(cls)) {// 封装成Map return parseSqlResultToListMap(rs); } else { if (cls.getClassLoader() == null) {// 封装成基本类型 return parseSqlResultToListBaseType(rs); } else {// 对象 return parseSqlResultToListObject(rs, cls); } } } public static void setParams(PreparedStatement ps, Object[] params) throws Exception { if (params != null) { for (int i = 0, len = params.length; i < len; i++) { ps.setObject(i + 1, params[i]); } } } public static Object getBeanObjectByClassType(ResultSet rs, Class<?> cls) throws Exception{ ResultSetMetaData metaData = rs.getMetaData(); int cols_len = metaData.getColumnCount(); if (cls == null || Map.class.isAssignableFrom(cls)) {// 封装成Map return JDBCUtils.getMap(rs, metaData, cols_len); }else { if (cls.getClassLoader() == null) {// 封装成基本类型 return JDBCUtils.getValueByObjectType(metaData, rs, 0); } else {// 对象 return JDBCUtils.getBeanObject(rs, metaData, cls, cols_len); } } } public static Object getBeanObject(ResultSet rs, ResultSetMetaData metaData, Class<?> cls, int cols_len) throws Exception { // TableInfoBean tabInfo = // JDBCUtils.getTableInfo(cls,SqlContext.getContext().getCurrentDataSource()); // List<ClassRelation> classRelationsList = // JDBCUtils.getClassRelationList(cls, tabInfo, false); Object obj_newInsten = cls.newInstance(); for (int i = 0; i < cols_len; i++) { String col_name = metaData.getColumnLabel(i + 1); /* * String col_name = metaData.getColumnLabel(i+1); for (int j = 0; j * < classRelationsList.size(); j++) { * if(classRelationsList.get(j).getColumnBean().getColumnName(). * equals(col_name)){ Object cols_value = * getValueByObjectType(metaData, rs, i); Field field = * classRelationsList.get(j).getField(); field.setAccessible(true); * field.set(obj_newInsten, cols_value); break; } } */ Field field = null; try { field = obj_newInsten.getClass().getDeclaredField(col_name); } catch (Exception e) { try { field = obj_newInsten.getClass().getDeclaredField(JDBCUtils.separatorToJavaBean(col_name.toLowerCase())); } catch (Exception e1) { } } if (field != null && !Modifier.isStatic(field.getModifiers())) { Object cols_value = getValueByObjectType(metaData, rs, i); field.setAccessible(true); field.set(obj_newInsten, cols_value); } } return obj_newInsten; } public static Map<String, Object> getMap(ResultSet rs, ResultSetMetaData metaData, int cols_len) throws Exception { Map<String, Object> map = new LinkedHashMap<String, Object>(); for (int i = 0; i < cols_len; i++) { String cols_name = metaData.getColumnLabel(i + 1); Object cols_value = getValueByObjectType(metaData, rs, i); map.put(cols_name, cols_value); } return map; } /** * 获取index指定的值,处理java数据类型和数据库类型的转换问题 * * @param metaData * @param rs * @param index * @return * @throws Exception */ public static Object getValueByObjectType(ResultSetMetaData metaData, ResultSet rs, int index) throws Exception { int columnIndex = index + 1; Object return_obj = rs.getObject(columnIndex); if (return_obj != null) { int type = metaData.getColumnType(columnIndex); switch (type) { case Types.BIT: return_obj = rs.getByte(columnIndex); break; case Types.TINYINT: return_obj = rs.getByte(columnIndex); break; case Types.SMALLINT: return_obj = rs.getShort(columnIndex); break; case Types.LONGVARBINARY: return_obj = rs.getBytes(columnIndex); break; default: return_obj = rs.getObject(columnIndex); } } return return_obj; } public static List<TableInfoBean> getDataBaseInfo(final DataSource dataSource) { List<TableInfoBean> tabList = 
CacheCenter.DATABASE_INFO_CACHE.get(dataSource); if (tabList == null) { Connection conn = null; try { tabList = new ArrayList<TableInfoBean>(); conn = dataSource.getConnection(); DatabaseMetaData meta = conn.getMetaData(); String jdbcurl = null; String username = null; Field[] fields = dataSource.getClass().getSuperclass().getDeclaredFields(); for(Field field:fields){ if (!Modifier.isStatic(field.getModifiers())) {// 去除静态类型字段 if(field.getName().toLowerCase().contains("url")){ field.setAccessible(true); jdbcurl = field.get(dataSource)==null?null:field.get(dataSource).toString(); } if(field.getName().toLowerCase().contains("username")){ field.setAccessible(true); username = field.get(dataSource)==null?null:field.get(dataSource).toString(); } } } String schema = null; if(jdbcurl!=null && jdbcurl.startsWith("jdbc:oracle:")){ schema = username; } ResultSet tablesResultSet = meta.getTables(conn.getCatalog(), schema==null?null:schema.toUpperCase(), "%", new String[] { "TABLE" }); while (tablesResultSet.next()) { TableInfoBean tableBean = new TableInfoBean(); String tableName = tablesResultSet.getString("TABLE_NAME"); ResultSet colRS = meta.getColumns(conn.getCatalog(), "%", tableName, "%"); tableBean.setTableName(tableName); while (colRS.next()) { ColumnBean colbean = new ColumnBean(); String colName = colRS.getString("COLUMN_NAME"); colbean.setColumnType(colRS.getInt("DATA_TYPE")); colbean.setColumnName(colName); tableBean.getColumnList().add(colbean); } // 设置主键 ResultSet primaryKeyResultSet = meta.getPrimaryKeys(conn.getCatalog(), null, tableName); while (primaryKeyResultSet.next()) { String primaryKeyColumnName = primaryKeyResultSet.getString("COLUMN_NAME"); for (int i = 0; i < tableBean.getColumnList().size(); i++) { ColumnBean colbean = tableBean.getColumnList().get(i); if (colbean.getColumnName().equals(primaryKeyColumnName)) { colbean.setPrimaryKey(true); break; } } } // 检查字段名规范 /* * List<ColumnBean> colList = tableBean.getColumnList(); for * (int i = 0; i < colList.size(); i++) { String col_name = * colList.get(i).getColumnName(); for (int j = i+1; j < * colList.size(); j++) { * if(getBeanName(colList.get(j).getColumnName()). * equalsIgnoreCase(getBeanName(col_name))){ try{ throw new * Exception("field name='"+tableName+"."+ * col_name+"' is not standard"); }catch(Exception e ){ * LOG.error("",e); } } } } */ // 检查表明规范 /* * for (int i = 0; i < tabList.size(); i++) { * if(getBeanName(tabList.get(i).getTableName()). 
* equalsIgnoreCase(getBeanName(tableName))){ try{ throw new * Exception("table name= '"+tabList.get(i).getTableName() * +"' is not standard"); }catch(Exception e ){ * LOG.error("",e); } } } */ tabList.add(tableBean); } CacheCenter.DATABASE_INFO_CACHE.put(dataSource, tabList); } catch (Exception e) { LOG.info("", e); } finally { try { if (conn != null && !conn.isClosed()) { conn.close(); } } catch (SQLException e) { LOG.info("", e); } } } return tabList; } /** * 将字符串转化为java bean驼峰命名规范 * * @param str * @return */ public static String separatorToJavaBean(String str) { int markIndex = str.lastIndexOf("_"); if (markIndex != -1) { String startStr = str.substring(0, markIndex); String endStr = str.substring(markIndex, str.length()); String newStr = startStr.toLowerCase() + endStr.substring(1, 2).toUpperCase() + endStr.substring(2); return separatorToJavaBean(newStr); } else { return str.substring(0, 1).toLowerCase() + str.substring(1); } } /** * 将驼峰命名的java字符串转下划线或者其他分隔符(默认分隔符为下划线) * * @param str * @return */ public static String javaBeanToSeparator(String str, Character separatorChar) { if (str == null || str.length() == 0) { return null; } if (separatorChar == null) { separatorChar = '_'; } StringBuilder sb = new StringBuilder(str); int index = 0; for (int i = 1; i < str.length(); i++) { char c = str.charAt(i); if (Character.isUpperCase(c)) { sb.replace(i + index, i + 1 + index, String.valueOf(c).toLowerCase()); sb.insert(i + index, separatorChar); index++; } } return sb.toString().toLowerCase(); } public static List<ClassRelation> getClassRelationList(Class<?> entityClass, TableInfoBean tabInfo) throws Exception { List<ClassRelation> classRelationsList = CacheCenter.CLASS_REL_FIELD_CACHE.get(entityClass); if (CacheCenter.CLASS_REL_FIELD_CACHE.containsKey(entityClass)) { return CacheCenter.CLASS_REL_FIELD_CACHE.get(entityClass); } else { List<ColumnBean> colList = tabInfo.getColumnList(); Field[] fieldArr = entityClass.getDeclaredFields(); classRelationsList = new ArrayList<ClassRelation>(); for (int i = 0, len = fieldArr.length; i < len; i++) { Field field = fieldArr[i]; if (!Modifier.isStatic(field.getModifiers())) {// 去除静态类型字段 String fdName = field.getName(); ClassRelation cr = null; for (int j = 0, lenn = colList.size(); j < lenn; j++) { ColumnBean colbean = colList.get(j); if (fdName.equalsIgnoreCase(colbean.getColumnName())) { cr = new ClassRelation(); cr.setColumnBean(colbean); cr.setField(field); break; } } if (cr == null) { for (int j = 0, lenn = colList.size(); j < lenn; j++) { ColumnBean colbean = colList.get(j); if (fdName.equalsIgnoreCase(JDBCUtils.separatorToJavaBean(colbean.getColumnName()))) { cr = new ClassRelation(); cr.setColumnBean(colbean); cr.setField(field); break; } } } if (cr != null) { classRelationsList.add(cr); } } } CacheCenter.CLASS_REL_FIELD_CACHE.put(entityClass, classRelationsList); return classRelationsList; } } public static TableInfoBean getTableInfoByClass(Class<?> entityClass, DataSource dataSource) { if (CacheCenter.SQL_TABLE_CACHE.containsKey(entityClass)) { return CacheCenter.SQL_TABLE_CACHE.get(entityClass); } else { TableInfoBean tabInfo = null; List<TableInfoBean> tableList = getDataBaseInfo(dataSource); String entityName = entityClass.getSimpleName(); for (int i = 0, len = tableList.size(); i < len; i++) { TableInfoBean tableBean = tableList.get(i); if (entityName.equalsIgnoreCase(tableBean.getTableName())) { tabInfo = tableBean; break; } } if (tabInfo == null) { for (int i = 0, len = tableList.size(); i < len; i++) { TableInfoBean tableBean = 
tableList.get(i); if (entityName.equalsIgnoreCase(JDBCUtils.separatorToJavaBean(tableBean.getTableName()))) { tabInfo = tableBean; break; } } } if(tabInfo==null){ return null; } CacheCenter.SQL_TABLE_CACHE.put(entityClass, tabInfo); return tabInfo; } } public static String getFinalSql(String sqlOrID){ return sqlOrID.startsWith("$") ? CacheCenter.SQL_SOURCE_MAP.get(sqlOrID) : sqlOrID; } }
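A hedged usage sketch of the result-mapping helpers above (the DataSource wiring, table name, and User bean are invented for illustration; only the JDBCUtils calls come from this file): parseSqlResultList returns a List of Maps when no bean class is given, and maps column labels onto bean fields, either literally or via separatorToJavaBean, when one is.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.List;
import java.util.Map;
import javax.sql.DataSource;
import org.dc.jdbc.core.utils.JDBCUtils;

public class JdbcUtilsDemo {
  public static void main(String[] args) throws Exception {
    DataSource ds = obtainDataSource(); // placeholder: supply a real connection pool here
    try (Connection conn = ds.getConnection();
         PreparedStatement ps = conn.prepareStatement("SELECT id, user_name FROM t_user")) {
      try (ResultSet rs = ps.executeQuery()) {
        // cls == null (or a Map type) -> each row becomes a LinkedHashMap of label -> value
        List<Map<String, Object>> rows = JDBCUtils.parseSqlResultList(rs, null);
        System.out.println(rows);
      }
      try (ResultSet rs = ps.executeQuery()) {
        // bean class -> "user_name" is matched to the userName field via separatorToJavaBean
        List<User> users = JDBCUtils.parseSqlResultList(rs, User.class);
        System.out.println(users);
      }
    }
  }

  static DataSource obtainDataSource() {
    throw new UnsupportedOperationException("wire in a real DataSource");
  }

  public static class User {
    Object id; // kept as Object: Field#set requires the exact type the JDBC driver returns
    String userName;
  }
}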
src/org/dc/jdbc/core/utils/JDBCUtils.java
package org.dc.jdbc.core.utils; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Types; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.dc.jdbc.core.CacheCenter; import org.dc.jdbc.core.entity.ClassRelation; import org.dc.jdbc.core.entity.ColumnBean; import org.dc.jdbc.core.entity.TableInfoBean; import org.dc.jdbc.exceptions.TooManyResultsException; /** * jdbc api封装成的工具类 * * @author DC * */ public class JDBCUtils { private static final Log LOG = LogFactory.getLog(JDBCUtils.class); public static void close(AutoCloseable... ac) throws Exception { for (int i = 0; i < ac.length; i++) { AutoCloseable autoClose = ac[i]; if(autoClose!=null){ try{ autoClose.close(); }catch (Exception e) { LOG.error("",e); } } } } /** * 编译sql并执行查询 * * @param ps * @param sql * @param params * @return 返回结果集对象 * @throws Exception */ public static ResultSet setParamsReturnRS(PreparedStatement ps, Object[] params) throws Exception { setParams(ps, params); return ps.executeQuery(); } /** * 执行sql语句,返回受影响的行数 * * @param conn * @param sql * @param params * @return * @throws Exception */ public static int preparedAndExcuteSQL(Connection conn, String sql, Object[] params) throws Exception { PreparedStatement ps = null; try { ps = conn.prepareStatement(sql); JDBCUtils.setParams(ps, params); return ps.executeUpdate(); } catch (Exception e) { throw e; } finally { close(ps); } } /** * 将sql查询结果转化成map类型的集合 * * @param rs * @param list * @throws Exception */ @SuppressWarnings("unchecked") private static <T> List<T> parseSqlResultToListMap(ResultSet rs) throws Exception { List<Object> list = new ArrayList<Object>(); ResultSetMetaData metaData = rs.getMetaData(); int cols_len = metaData.getColumnCount(); while (rs.next()) { list.add(getMap(rs, metaData, cols_len)); } if (list.size() == 0) { return null; } else { return (List<T>) list; } } /** * 将sql查询结果转化成对象 * * @param <T> * @param rs * @param cls * @param list * @throws Exception */ @SuppressWarnings("unchecked") private static <T> List<T> parseSqlResultToListObject(ResultSet rs, Class<? extends T> cls) throws Exception { List<Object> list = new ArrayList<Object>(); ResultSetMetaData metaData = rs.getMetaData(); int cols_len = metaData.getColumnCount(); while (rs.next()) { list.add(getBeanObject(rs, metaData, cls, cols_len)); } if (list.size() == 0) { return null; } else { return (List<T>) list; } } /** * 将sql查询结果转化成java基本数据类型 * * @param rs * @return * @throws Exception */ @SuppressWarnings("unchecked") private static <T> List<T> parseSqlResultToListBaseType(ResultSet rs) throws Exception { List<Object> list = new ArrayList<Object>(); ResultSetMetaData metaData = rs.getMetaData(); int cols_len = metaData.getColumnCount(); if (cols_len > 1) { throw new TooManyResultsException(); } while (rs.next()) { Object cols_value = getValueByObjectType(metaData, rs, 0); list.add(cols_value); } if (list.size() == 0) { return null; } else { return (List<T>) list; } } /** * 将sql查询结果封装成cls指定的泛型类型的集合并 * * @param rs * @param cls * @return * @throws Exception * 抛出程序可能出现一切异常 */ public static <T> List<T> parseSqlResultList(ResultSet rs, Class<? 
extends T> cls) throws Exception { if (cls == null || Map.class.isAssignableFrom(cls)) {// 封装成Map return parseSqlResultToListMap(rs); } else { if (cls.getClassLoader() == null) {// 封装成基本类型 return parseSqlResultToListBaseType(rs); } else {// 对象 return parseSqlResultToListObject(rs, cls); } } } public static void setParams(PreparedStatement ps, Object[] params) throws Exception { if (params != null) { for (int i = 0, len = params.length; i < len; i++) { ps.setObject(i + 1, params[i]); } } } public static Object getBeanObjectByClassType(ResultSet rs, Class<?> cls) throws Exception{ ResultSetMetaData metaData = rs.getMetaData(); int cols_len = metaData.getColumnCount(); if (cls == null || Map.class.isAssignableFrom(cls)) {// 封装成Map return JDBCUtils.getMap(rs, metaData, cols_len); }else { if (cls.getClassLoader() == null) {// 封装成基本类型 return JDBCUtils.getValueByObjectType(metaData, rs, 0); } else {// 对象 return JDBCUtils.getBeanObject(rs, metaData, cls, cols_len); } } } public static Object getBeanObject(ResultSet rs, ResultSetMetaData metaData, Class<?> cls, int cols_len) throws Exception { // TableInfoBean tabInfo = // JDBCUtils.getTableInfo(cls,SqlContext.getContext().getCurrentDataSource()); // List<ClassRelation> classRelationsList = // JDBCUtils.getClassRelationList(cls, tabInfo, false); Object obj_newInsten = cls.newInstance(); for (int i = 0; i < cols_len; i++) { String col_name = metaData.getColumnLabel(i + 1); /* * String col_name = metaData.getColumnLabel(i+1); for (int j = 0; j * < classRelationsList.size(); j++) { * if(classRelationsList.get(j).getColumnBean().getColumnName(). * equals(col_name)){ Object cols_value = * getValueByObjectType(metaData, rs, i); Field field = * classRelationsList.get(j).getField(); field.setAccessible(true); * field.set(obj_newInsten, cols_value); break; } } */ Field field = null; try { field = obj_newInsten.getClass().getDeclaredField(col_name); } catch (Exception e) { try { field = obj_newInsten.getClass().getDeclaredField(JDBCUtils.separatorToJavaBean(col_name.toLowerCase())); } catch (Exception e1) { } } if (field != null && !Modifier.isStatic(field.getModifiers())) { Object cols_value = getValueByObjectType(metaData, rs, i); field.setAccessible(true); field.set(obj_newInsten, cols_value); } } return obj_newInsten; } public static Map<String, Object> getMap(ResultSet rs, ResultSetMetaData metaData, int cols_len) throws Exception { Map<String, Object> map = new LinkedHashMap<String, Object>(); for (int i = 0; i < cols_len; i++) { String cols_name = metaData.getColumnLabel(i + 1); Object cols_value = getValueByObjectType(metaData, rs, i); map.put(cols_name, cols_value); } return map; } /** * 获取index指定的值,处理java数据类型和数据库类型的转换问题 * * @param metaData * @param rs * @param index * @return * @throws Exception */ public static Object getValueByObjectType(ResultSetMetaData metaData, ResultSet rs, int index) throws Exception { int columnIndex = index + 1; Object return_obj = rs.getObject(columnIndex); if (return_obj != null) { int type = metaData.getColumnType(columnIndex); switch (type) { case Types.BIT: return_obj = rs.getByte(columnIndex); break; case Types.TINYINT: return_obj = rs.getByte(columnIndex); break; case Types.SMALLINT: return_obj = rs.getShort(columnIndex); break; case Types.LONGVARBINARY: return_obj = rs.getBytes(columnIndex); break; default: return_obj = rs.getObject(columnIndex); } } return return_obj; } public static List<TableInfoBean> getDataBaseInfo(final DataSource dataSource) { List<TableInfoBean> tabList = 
CacheCenter.DATABASE_INFO_CACHE.get(dataSource); if (tabList == null) { Connection conn = null; try { tabList = new ArrayList<TableInfoBean>(); conn = dataSource.getConnection(); DatabaseMetaData meta = conn.getMetaData(); String jdbcurl = null; String username = null; Field[] fields = dataSource.getClass().getSuperclass().getDeclaredFields(); for(Field field:fields){ if (!Modifier.isStatic(field.getModifiers())) {// 去除静态类型字段 if(field.getName().toLowerCase().contains("url")){ field.setAccessible(true); jdbcurl = field.get(dataSource)==null?null:field.get(dataSource).toString(); } if(field.getName().toLowerCase().contains("username")){ field.setAccessible(true); username = field.get(dataSource)==null?null:field.get(dataSource).toString(); } } } String schema = null; if(jdbcurl!=null && jdbcurl.startsWith("jdbc:oracle:")){ schema = username; } ResultSet tablesResultSet = meta.getTables(conn.getCatalog(), schema==null?null:schema.toUpperCase(), "%", new String[] { "TABLE" }); while (tablesResultSet.next()) { TableInfoBean tableBean = new TableInfoBean(); String tableName = tablesResultSet.getString("TABLE_NAME"); ResultSet colRS = meta.getColumns(conn.getCatalog(), "%", tableName, "%"); tableBean.setTableName(tableName); while (colRS.next()) { ColumnBean colbean = new ColumnBean(); String colName = colRS.getString("COLUMN_NAME"); colbean.setColumnType(colRS.getInt("DATA_TYPE")); colbean.setColumnName(colName); tableBean.getColumnList().add(colbean); } // 设置主键 ResultSet primaryKeyResultSet = meta.getPrimaryKeys(conn.getCatalog(), null, tableName); while (primaryKeyResultSet.next()) { String primaryKeyColumnName = primaryKeyResultSet.getString("COLUMN_NAME"); for (int i = 0; i < tableBean.getColumnList().size(); i++) { ColumnBean colbean = tableBean.getColumnList().get(i); if (colbean.getColumnName().equals(primaryKeyColumnName)) { colbean.setPrimaryKey(true); break; } } } // 检查字段名规范 /* * List<ColumnBean> colList = tableBean.getColumnList(); for * (int i = 0; i < colList.size(); i++) { String col_name = * colList.get(i).getColumnName(); for (int j = i+1; j < * colList.size(); j++) { * if(getBeanName(colList.get(j).getColumnName()). * equalsIgnoreCase(getBeanName(col_name))){ try{ throw new * Exception("field name='"+tableName+"."+ * col_name+"' is not standard"); }catch(Exception e ){ * LOG.error("",e); } } } } */ // 检查表明规范 /* * for (int i = 0; i < tabList.size(); i++) { * if(getBeanName(tabList.get(i).getTableName()). 
* equalsIgnoreCase(getBeanName(tableName))){ try{ throw new * Exception("table name= '"+tabList.get(i).getTableName() * +"' is not standard"); }catch(Exception e ){ * LOG.error("",e); } } } */ tabList.add(tableBean); } CacheCenter.DATABASE_INFO_CACHE.put(dataSource, tabList); } catch (Exception e) { LOG.info("", e); } finally { try { if (conn != null && !conn.isClosed()) { conn.close(); } } catch (SQLException e) { LOG.info("", e); } } } return tabList; } /** * 将字符串转化为java bean驼峰命名规范 * * @param str * @return */ public static String separatorToJavaBean(String str) { int markIndex = str.lastIndexOf("_"); if (markIndex != -1) { String startStr = str.substring(0, markIndex); String endStr = str.substring(markIndex, str.length()); String newStr = startStr + endStr.substring(1, 2).toUpperCase() + endStr.substring(2); return separatorToJavaBean(newStr); } else { return str.substring(0, 1).toLowerCase() + str.substring(1); } } /** * 将驼峰命名的java字符串转下划线或者其他分隔符(默认分隔符为下划线) * * @param str * @return */ public static String javaBeanToSeparator(String str, Character separatorChar) { if (str == null || str.length() == 0) { return null; } if (separatorChar == null) { separatorChar = '_'; } StringBuilder sb = new StringBuilder(str); int index = 0; for (int i = 1; i < str.length(); i++) { char c = str.charAt(i); if (Character.isUpperCase(c)) { sb.replace(i + index, i + 1 + index, String.valueOf(c).toLowerCase()); sb.insert(i + index, separatorChar); index++; } } return sb.toString().toLowerCase(); } public static List<ClassRelation> getClassRelationList(Class<?> entityClass, TableInfoBean tabInfo) throws Exception { List<ClassRelation> classRelationsList = CacheCenter.CLASS_REL_FIELD_CACHE.get(entityClass); if (CacheCenter.CLASS_REL_FIELD_CACHE.containsKey(entityClass)) { return CacheCenter.CLASS_REL_FIELD_CACHE.get(entityClass); } else { List<ColumnBean> colList = tabInfo.getColumnList(); Field[] fieldArr = entityClass.getDeclaredFields(); classRelationsList = new ArrayList<ClassRelation>(); for (int i = 0, len = fieldArr.length; i < len; i++) { Field field = fieldArr[i]; if (!Modifier.isStatic(field.getModifiers())) {// 去除静态类型字段 String fdName = field.getName(); ClassRelation cr = null; for (int j = 0, lenn = colList.size(); j < lenn; j++) { ColumnBean colbean = colList.get(j); if (fdName.equalsIgnoreCase(colbean.getColumnName())) { cr = new ClassRelation(); cr.setColumnBean(colbean); cr.setField(field); break; } } if (cr == null) { for (int j = 0, lenn = colList.size(); j < lenn; j++) { ColumnBean colbean = colList.get(j); if (fdName.equalsIgnoreCase(JDBCUtils.separatorToJavaBean(colbean.getColumnName()))) { cr = new ClassRelation(); cr.setColumnBean(colbean); cr.setField(field); break; } } } if (cr != null) { classRelationsList.add(cr); } } } CacheCenter.CLASS_REL_FIELD_CACHE.put(entityClass, classRelationsList); return classRelationsList; } } public static TableInfoBean getTableInfoByClass(Class<?> entityClass, DataSource dataSource) { if (CacheCenter.SQL_TABLE_CACHE.containsKey(entityClass)) { return CacheCenter.SQL_TABLE_CACHE.get(entityClass); } else { TableInfoBean tabInfo = null; List<TableInfoBean> tableList = getDataBaseInfo(dataSource); String entityName = entityClass.getSimpleName(); for (int i = 0, len = tableList.size(); i < len; i++) { TableInfoBean tableBean = tableList.get(i); if (entityName.equalsIgnoreCase(tableBean.getTableName())) { tabInfo = tableBean; break; } } if (tabInfo == null) { for (int i = 0, len = tableList.size(); i < len; i++) { TableInfoBean tableBean = tableList.get(i); if 
(entityName.equalsIgnoreCase(JDBCUtils.separatorToJavaBean(tableBean.getTableName()))) { tabInfo = tableBean; break; } } } if(tabInfo==null){ return null; } CacheCenter.SQL_TABLE_CACHE.put(entityClass, tabInfo); return tabInfo; } } public static String getFinalSql(String sqlOrID){ return sqlOrID.startsWith("$") ? CacheCenter.SQL_SOURCE_MAP.get(sqlOrID) : sqlOrID; } }
Optimization: string-to-JavaBean name conversion method
src/org/dc/jdbc/core/utils/JDBCUtils.java
Optimization: string-to-JavaBean name conversion method
<ide><path>src/org/dc/jdbc/core/utils/JDBCUtils.java <ide> if (markIndex != -1) { <ide> String startStr = str.substring(0, markIndex); <ide> String endStr = str.substring(markIndex, str.length()); <del> String newStr = startStr + endStr.substring(1, 2).toUpperCase() + endStr.substring(2); <add> String newStr = startStr.toLowerCase() + endStr.substring(1, 2).toUpperCase() + endStr.substring(2); <ide> return separatorToJavaBean(newStr); <ide> } else { <ide> return str.substring(0, 1).toLowerCase() + str.substring(1); <ide> return sb.toString().toLowerCase(); <ide> } <ide> <del> public static List<ClassRelation> getClassRelationList(Class<?> entityClass, TableInfoBean tabInfo) <del> throws Exception { <add> public static List<ClassRelation> getClassRelationList(Class<?> entityClass, TableInfoBean tabInfo) throws Exception { <ide> List<ClassRelation> classRelationsList = CacheCenter.CLASS_REL_FIELD_CACHE.get(entityClass); <ide> if (CacheCenter.CLASS_REL_FIELD_CACHE.containsKey(entityClass)) { <ide> return CacheCenter.CLASS_REL_FIELD_CACHE.get(entityClass);
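The one-line change above only lowercases the text before the last underscore prior to recursing; a minimal sketch of the resulting behavior, with invented inputs (expected outputs computed from the patched code):

import org.dc.jdbc.core.utils.JDBCUtils;

public class SeparatorToJavaBeanDemo {
  public static void main(String[] args) {
    // Already-lowercase input behaves as before:
    System.out.println(JDBCUtils.separatorToJavaBean("user_name"));     // userName
    System.out.println(JDBCUtils.separatorToJavaBean("order_item_id")); // orderItemId

    // Upper-case column names (as returned by some databases) no longer keep their
    // upper-case prefix: previously "USER_ID" became "uSERID"; with the fix it becomes "userID".
    System.out.println(JDBCUtils.separatorToJavaBean("USER_ID"));       // userID
  }
}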
Java
apache-2.0
2d95dfa60c88f91facd47e04ea291f5005155d95
0
allotria/intellij-community,da1z/intellij-community,xfournet/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,signed/intellij-community,youdonghai/intellij-community,allotria/intellij-community,xfournet/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,semonte/intellij-community,allotria/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,da1z/intellij-community,asedunov/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,allotria/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,asedunov/intellij-community,semonte/intellij-community,allotria/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,semonte/intellij-community,vvv1559/intellij-community,semonte/intellij-community,apixandru/intellij-community,apixandru/intellij-community,semonte/intellij-community,signed/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,FHannes/intellij-community,da1z/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,signed/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,signed/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,allotria/intellij-community,semonte/intellij-community,da1z/intellij-community,allotria/intellij-community,da1z/intellij-community,youdonghai/intellij-community,da1z/intellij-community,ibinti/intellij-community,ibinti/intellij-community,apixandru/intellij-community,apixandru/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,allotria/intellij-community,FHannes/intellij-community,da1z/intellij-community,da1z/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,signed/intellij-community,semonte/intellij-community,apixandru/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,xfournet/intellij-community,allotria/intellij-community,allotria/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,signed/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,signed/intellij-community,apixandru/intellij-community,FHannes/intellij-community,allotria/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,semonte/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,xfournet/intellij-community,apixandru/intellij-community,da1z/intellij-comm
unity,ibinti/intellij-community,asedunov/intellij-community,apixandru/intellij-community,FHannes/intellij-community,allotria/intellij-community,ibinti/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,FHannes/intellij-community,apixandru/intellij-community,signed/intellij-community,vvv1559/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,semonte/intellij-community,suncycheng/intellij-community,signed/intellij-community,signed/intellij-community,xfournet/intellij-community,da1z/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,da1z/intellij-community,da1z/intellij-community,apixandru/intellij-community,xfournet/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,semonte/intellij-community,asedunov/intellij-community,da1z/intellij-community,asedunov/intellij-community,signed/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,FHannes/intellij-community
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.ui.search; import com.intellij.application.options.SkipSelfSearchComponent; import com.intellij.ide.actions.ShowSettingsUtilImpl; import com.intellij.openapi.options.Configurable; import com.intellij.openapi.options.ConfigurableGroup; import com.intellij.openapi.options.MasterDetails; import com.intellij.openapi.options.SearchableConfigurable; import com.intellij.openapi.options.ex.GlassPanel; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.text.StringUtil; import com.intellij.ui.JBColor; import com.intellij.ui.SimpleColoredComponent; import com.intellij.ui.SimpleTextAttributes; import com.intellij.ui.TabbedPaneWrapper; import com.intellij.ui.tabs.JBTabs; import com.intellij.ui.tabs.TabInfo; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.Border; import javax.swing.border.TitledBorder; import java.awt.*; import java.util.*; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * @author anna * @since 07.02.2006 */ public class SearchUtil { private static final Pattern HTML_PATTERN = Pattern.compile("<[^<>]*>"); private static final Pattern QUOTED = Pattern.compile("\"([^\"]+)\""); public static final String HIGHLIGHT_WITH_BORDER = "searchUtil.highlightWithBorder"; public static final String STYLE_END = "</style>"; private SearchUtil() { } public static void processProjectConfigurables(Project project, Map<SearchableConfigurable, Set<OptionDescription>> options) { processConfigurables(ShowSettingsUtilImpl.getConfigurables(project, false), options); } private static void processConfigurables(Configurable[] configurables, Map<SearchableConfigurable, Set<OptionDescription>> options) { for (Configurable configurable : configurables) { if (configurable instanceof SearchableConfigurable) { Set<OptionDescription> configurableOptions = new TreeSet<>(); if (configurable instanceof Configurable.Composite) { final Configurable[] children = ((Configurable.Composite)configurable).getConfigurables(); processConfigurables(children, options); } //ignore invisible root nodes if (configurable instanceof SearchableConfigurable.Parent && !((SearchableConfigurable.Parent)configurable).isVisible()) { continue; } options.put((SearchableConfigurable)configurable, configurableOptions); if (configurable instanceof MasterDetails) { final MasterDetails md = (MasterDetails)configurable; md.initUi(); processComponent(configurable, configurableOptions, md.getMaster()); processComponent(configurable, configurableOptions, md.getDetails().getComponent()); } else { processComponent(configurable, configurableOptions, configurable.createComponent()); } } } } private static void processComponent(Configurable configurable, Set<OptionDescription> configurableOptions, JComponent component) { if (component != null) { 
processUILabel(configurable.getDisplayName(), configurableOptions, null); processComponent(component, configurableOptions, null); } } private static void processComponent(JComponent component, Set<OptionDescription> configurableOptions, String path) { if (component instanceof SkipSelfSearchComponent) return; final Border border = component.getBorder(); if (border instanceof TitledBorder) { final TitledBorder titledBorder = (TitledBorder)border; final String title = titledBorder.getTitle(); if (title != null) { processUILabel(title, configurableOptions, path); } } if (component instanceof JLabel) { final String label = ((JLabel)component).getText(); if (label != null) { processUILabel(label, configurableOptions, path); } } else if (component instanceof JCheckBox) { final String checkBoxTitle = ((JCheckBox)component).getText(); if (checkBoxTitle != null) { processUILabel(checkBoxTitle, configurableOptions, path); } } else if (component instanceof JRadioButton) { final String radioButtonTitle = ((JRadioButton)component).getText(); if (radioButtonTitle != null) { processUILabel(radioButtonTitle, configurableOptions, path); } } else if (component instanceof JButton) { final String buttonTitle = ((JButton)component).getText(); if (buttonTitle != null) { processUILabel(buttonTitle, configurableOptions, path); } } if (component instanceof JTabbedPane) { final JTabbedPane tabbedPane = (JTabbedPane)component; final int tabCount = tabbedPane.getTabCount(); for (int i = 0; i < tabCount; i++) { final String title = path != null ? path + '.' + tabbedPane.getTitleAt(i) : tabbedPane.getTitleAt(i); processUILabel(title, configurableOptions, title); final Component tabComponent = tabbedPane.getComponentAt(i); if (tabComponent instanceof JComponent) { processComponent((JComponent)tabComponent, configurableOptions, title); } } } else if (component instanceof JBTabs) { final JBTabs tabbedPane = (JBTabs)component; final int tabCount = tabbedPane.getTabCount(); for (int i = 0; i < tabCount; i++) { TabInfo tabInfo = tabbedPane.getTabAt(i); String tabTitle = tabInfo.getText(); final String title = path != null ? path + '.' 
+ tabTitle : tabTitle; processUILabel(title, configurableOptions, title); final JComponent tabComponent = tabInfo.getComponent(); if (tabComponent != null) { processComponent(tabComponent, configurableOptions, title); } } } else { final Component[] components = component.getComponents(); if (components != null) { for (Component child : components) { if (child instanceof JComponent) { processComponent((JComponent)child, configurableOptions, path); } } } } } private static void processUILabel(String title, Set<OptionDescription> configurableOptions, String path) { final Set<String> words = SearchableOptionsRegistrar.getInstance().getProcessedWordsWithoutStemming(title); final String regex = "[\\W&&[^\\p{Punct}\\p{Blank}]]"; for (String option : words) { configurableOptions.add(new OptionDescription(option, HTML_PATTERN.matcher(title).replaceAll(" ").replaceAll(regex, " "), path)); } } public static Runnable lightOptions(SearchableConfigurable configurable, JComponent component, String option, GlassPanel glassPanel) { return () -> { if (!traverseComponentsTree(configurable, glassPanel, component, option, true)) { traverseComponentsTree(configurable, glassPanel, component, option, false); } }; } private static int getSelection(String tabIdx, final JTabbedPane tabbedPane) { SearchableOptionsRegistrar searchableOptionsRegistrar = SearchableOptionsRegistrar.getInstance(); for (int i = 0; i < tabbedPane.getTabCount(); i++) { final Set<String> pathWords = searchableOptionsRegistrar.getProcessedWords(tabIdx); final String title = tabbedPane.getTitleAt(i); if (!pathWords.isEmpty()) { final Set<String> titleWords = searchableOptionsRegistrar.getProcessedWords(title); pathWords.removeAll(titleWords); if (pathWords.isEmpty()) return i; } else if (tabIdx.equalsIgnoreCase(title)) { //e.g. 
only stop words return i; } } return -1; } public static int getSelection(String tabIdx, final TabbedPaneWrapper tabbedPane) { SearchableOptionsRegistrar searchableOptionsRegistrar = SearchableOptionsRegistrar.getInstance(); for (int i = 0; i < tabbedPane.getTabCount(); i++) { final Set<String> pathWords = searchableOptionsRegistrar.getProcessedWords(tabIdx); final String title = tabbedPane.getTitleAt(i); final Set<String> titleWords = searchableOptionsRegistrar.getProcessedWords(title); pathWords.removeAll(titleWords); if (pathWords.isEmpty()) return i; } return -1; } private static boolean traverseComponentsTree(SearchableConfigurable configurable, GlassPanel glassPanel, JComponent rootComponent, String option, boolean force) { rootComponent.putClientProperty(HIGHLIGHT_WITH_BORDER, null); if (option == null || option.trim().length() == 0) return false; boolean highlight = false; if (rootComponent instanceof JCheckBox) { final JCheckBox checkBox = ((JCheckBox)rootComponent); if (isComponentHighlighted(checkBox.getText(), option, force, configurable)) { highlight = true; glassPanel.addSpotlight(checkBox); } } else if (rootComponent instanceof JRadioButton) { final JRadioButton radioButton = ((JRadioButton)rootComponent); if (isComponentHighlighted(radioButton.getText(), option, force, configurable)) { highlight = true; glassPanel.addSpotlight(radioButton); } } else if (rootComponent instanceof JLabel) { final JLabel label = ((JLabel)rootComponent); if (isComponentHighlighted(label.getText(), option, force, configurable)) { highlight = true; glassPanel.addSpotlight(label); } } else if (rootComponent instanceof JButton) { final JButton button = ((JButton)rootComponent); if (isComponentHighlighted(button.getText(), option, force, configurable)) { highlight = true; glassPanel.addSpotlight(button); } } else if (rootComponent instanceof JTabbedPane) { final JTabbedPane tabbedPane = (JTabbedPane)rootComponent; final String path = SearchableOptionsRegistrar.getInstance().getInnerPath(configurable, option); if (path != null) { final int index = getSelection(path, tabbedPane); if (index > -1 && index < tabbedPane.getTabCount()) { if (tabbedPane.getTabComponentAt(index) instanceof JComponent) { glassPanel.addSpotlight((JComponent)tabbedPane.getTabComponentAt(index)); } } } } else if (rootComponent instanceof TabbedPaneWrapper.TabbedPaneHolder) { final TabbedPaneWrapper tabbedPaneWrapper = ((TabbedPaneWrapper.TabbedPaneHolder)rootComponent).getTabbedPaneWrapper(); final String path = SearchableOptionsRegistrar.getInstance().getInnerPath(configurable, option); if (path != null) { final int index = getSelection(path, tabbedPaneWrapper); if (index > -1 && index < tabbedPaneWrapper.getTabCount()) { glassPanel.addSpotlight((JComponent)tabbedPaneWrapper.getTabComponentAt(index)); } } } final Component[] components = rootComponent.getComponents(); for (Component component : components) { if (component instanceof JComponent) { final boolean innerHighlight = traverseComponentsTree(configurable, glassPanel, (JComponent)component, option, force); if (!highlight && !innerHighlight) { final Border border = rootComponent.getBorder(); if (border instanceof TitledBorder) { final String title = ((TitledBorder)border).getTitle(); if (isComponentHighlighted(title, option, force, configurable)) { highlight = true; glassPanel.addSpotlight(rootComponent); rootComponent.putClientProperty(HIGHLIGHT_WITH_BORDER, Boolean.TRUE); } } } if (innerHighlight) { highlight = true; } } } return highlight; } public static boolean 
isComponentHighlighted(String text, String option, boolean force, final SearchableConfigurable configurable) { if (text == null || option == null || option.length() == 0) return false; final SearchableOptionsRegistrar searchableOptionsRegistrar = SearchableOptionsRegistrar.getInstance(); final Set<String> words = searchableOptionsRegistrar.getProcessedWords(option); final Set<String> options = configurable != null ? searchableOptionsRegistrar.replaceSynonyms(words, configurable) : words; if (options == null || options.isEmpty()) { return text.toLowerCase(Locale.US).contains(option.toLowerCase(Locale.US)); } final Set<String> tokens = searchableOptionsRegistrar.getProcessedWords(text); if (!force) { options.retainAll(tokens); final boolean highlight = !options.isEmpty(); return highlight || text.toLowerCase(Locale.US).contains(option.toLowerCase(Locale.US)); } else { options.removeAll(tokens); return options.isEmpty(); } } public static String markup(@NotNull String textToMarkup, @Nullable String filter) { if (filter == null || filter.length() == 0) { return textToMarkup; } int bodyStart = textToMarkup.indexOf("<body>"); final int bodyEnd = textToMarkup.indexOf("</body>"); final String head; final String foot; if (bodyStart >= 0) { bodyStart += "<body>".length(); head = textToMarkup.substring(0, bodyStart); if (bodyEnd >= 0) { foot = textToMarkup.substring(bodyEnd); } else { foot = ""; } textToMarkup = textToMarkup.substring(bodyStart, bodyEnd); } else { foot = ""; head = ""; } final Pattern insideHtmlTagPattern = Pattern.compile("[<[^<>]*>]*<[^<>]*"); final SearchableOptionsRegistrar registrar = SearchableOptionsRegistrar.getInstance(); final HashSet<String> quoted = new HashSet<>(); filter = processFilter(quoteStrictOccurrences(textToMarkup, filter), quoted); final Set<String> options = registrar.getProcessedWords(filter); final Set<String> words = registrar.getProcessedWords(textToMarkup); for (String option : options) { if (words.contains(option)) { textToMarkup = markup(textToMarkup, insideHtmlTagPattern, option); } } for (String stripped : quoted) { if (registrar.isStopWord(stripped)) continue; textToMarkup = markup(textToMarkup, insideHtmlTagPattern, stripped); } return head + textToMarkup + foot; } private static String quoteStrictOccurrences(final String textToMarkup, final String filter) { String cur = ""; final String s = textToMarkup.toLowerCase(Locale.US); for (String part : filter.split(" ")) { if (s.contains(part)) { cur += "\"" + part + "\" "; } else { cur += part + " "; } } return cur; } private static String markup(String textToMarkup, final Pattern insideHtmlTagPattern, final String option) { final int styleIdx = textToMarkup.indexOf("<style"); final int styleEndIdx = textToMarkup.indexOf("</style>"); if (styleIdx < 0 || styleEndIdx < 0) { return markupInText(textToMarkup, insideHtmlTagPattern, option); } return markup(textToMarkup.substring(0, styleIdx), insideHtmlTagPattern, option) + markup(textToMarkup.substring(styleEndIdx + STYLE_END.length()), insideHtmlTagPattern, option); } private static String markupInText(String textToMarkup, Pattern insideHtmlTagPattern, String option) { String result = ""; int beg = 0; int idx; while ((idx = StringUtil.indexOfIgnoreCase(textToMarkup, option, beg)) != -1) { final String prefix = textToMarkup.substring(beg, idx); final String toMark = textToMarkup.substring(idx, idx + option.length()); if (insideHtmlTagPattern.matcher(prefix).matches()) { final int lastIdx = textToMarkup.indexOf(">", idx); result += prefix + 
textToMarkup.substring(idx, lastIdx + 1); beg = lastIdx + 1; } else { result += prefix + "<font color='#ffffff' bgColor='#1d5da7'>" + toMark + "</font>"; beg = idx + option.length(); } } result += textToMarkup.substring(beg); return result; } public static void appendFragments(String filter, String text, @SimpleTextAttributes.StyleAttributeConstant int style, final Color foreground, final Color background, final SimpleColoredComponent textRenderer) { if (text == null) return; if (filter == null || filter.length() == 0) { textRenderer.append(text, new SimpleTextAttributes(background, foreground, JBColor.RED, style)); } else { //markup final HashSet<String> quoted = new HashSet<>(); filter = processFilter(quoteStrictOccurrences(text, filter), quoted); final TreeMap<Integer, String> indx = new TreeMap<>(); for (String stripped : quoted) { int beg = 0; int idx; while ((idx = StringUtil.indexOfIgnoreCase(text, stripped, beg)) != -1) { indx.put(idx, text.substring(idx, idx + stripped.length())); beg = idx + stripped.length(); } } final List<String> selectedWords = new ArrayList<>(); int pos = 0; for (Integer index : indx.keySet()) { final String stripped = indx.get(index); final int start = index.intValue(); if (pos > start) { final String highlighted = selectedWords.get(selectedWords.size() - 1); if (highlighted.length() < stripped.length()) { selectedWords.remove(highlighted); } else { continue; } } appendSelectedWords(text, selectedWords, pos, start, filter); selectedWords.add(stripped); pos = start + stripped.length(); } appendSelectedWords(text, selectedWords, pos, text.length(), filter); int idx = 0; for (String word : selectedWords) { text = text.substring(idx); final String before = text.substring(0, text.indexOf(word)); if (before.length() > 0) textRenderer.append(before, new SimpleTextAttributes(background, foreground, null, style)); idx = text.indexOf(word) + word.length(); textRenderer.append(text.substring(idx - word.length(), idx), new SimpleTextAttributes(background, foreground, null, style | SimpleTextAttributes.STYLE_SEARCH_MATCH)); } final String after = text.substring(idx, text.length()); if (after.length() > 0) textRenderer.append(after, new SimpleTextAttributes(background, foreground, null, style)); } } private static void appendSelectedWords(final String text, final List<String> selectedWords, final int pos, int end, final String filter) { if (pos < end) { final Set<String> filters = SearchableOptionsRegistrar.getInstance().getProcessedWords(filter); final String[] words = text.substring(pos, end).split("[\\W&&[^-]]+"); for (String word : words) { if (filters.contains(PorterStemmerUtil.stem(word.toLowerCase(Locale.US)))) { selectedWords.add(word); } } } } public static List<Set<String>> findKeys(String filter, Set<String> quoted) { filter = processFilter(filter.toLowerCase(Locale.US), quoted); final List<Set<String>> keySetList = new ArrayList<>(); final SearchableOptionsRegistrar optionsRegistrar = SearchableOptionsRegistrar.getInstance(); final Set<String> words = optionsRegistrar.getProcessedWords(filter); for (String word : words) { final Set<OptionDescription> descriptions = ((SearchableOptionsRegistrarImpl)optionsRegistrar).getAcceptableDescriptions(word); Set<String> keySet = new HashSet<>(); if (descriptions != null) { for (OptionDescription description : descriptions) { keySet.add(description.getPath()); } } keySetList.add(keySet); } if (keySetList.isEmpty() && !StringUtil.isEmptyOrSpaces(filter)) { keySetList.add(Collections.singleton(filter)); } return 
keySetList; } public static String processFilter(String filter, Set<String> quoted) { String withoutQuoted = ""; int beg = 0; final Matcher matcher = QUOTED.matcher(filter); while (matcher.find()) { final int start = matcher.start(1); withoutQuoted += " " + filter.substring(beg, start); beg = matcher.end(1); final String trimmed = filter.substring(start, beg).trim(); if (trimmed.length() > 0) { quoted.add(trimmed); } } return withoutQuoted + " " + filter.substring(beg); } public static List<Configurable> expand(ConfigurableGroup[] groups) { final ArrayList<Configurable> result = new ArrayList<>(); for (ConfigurableGroup eachGroup : groups) { result.addAll(expandGroup(eachGroup)); } return result; } public static List<Configurable> expandGroup(final ConfigurableGroup group) { final Configurable[] configurables = group.getConfigurables(); List<Configurable> result = new ArrayList<>(); ContainerUtil.addAll(result, configurables); for (Configurable each : configurables) { addChildren(each, result); } return ContainerUtil.filter(result, configurable -> !(configurable instanceof SearchableConfigurable.Parent) || ((SearchableConfigurable.Parent)configurable).isVisible()); } private static void addChildren(Configurable configurable, List<Configurable> list) { if (configurable instanceof Configurable.Composite) { final Configurable[] kids = ((Configurable.Composite)configurable).getConfigurables(); for (Configurable eachKid : kids) { list.add(eachKid); addChildren(eachKid, list); } } } }
platform/platform-impl/src/com/intellij/ide/ui/search/SearchUtil.java
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ide.ui.search; import com.intellij.application.options.SkipSelfSearchComponent; import com.intellij.ide.actions.ShowSettingsUtilImpl; import com.intellij.openapi.options.Configurable; import com.intellij.openapi.options.ConfigurableGroup; import com.intellij.openapi.options.MasterDetails; import com.intellij.openapi.options.SearchableConfigurable; import com.intellij.openapi.options.ex.GlassPanel; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.text.StringUtil; import com.intellij.ui.JBColor; import com.intellij.ui.SimpleColoredComponent; import com.intellij.ui.SimpleTextAttributes; import com.intellij.ui.TabbedPaneWrapper; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.Border; import javax.swing.border.TitledBorder; import java.awt.*; import java.util.*; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * @author anna * @since 07.02.2006 */ public class SearchUtil { private static final Pattern HTML_PATTERN = Pattern.compile("<[^<>]*>"); private static final Pattern QUOTED = Pattern.compile("\"([^\"]+)\""); public static final String HIGHLIGHT_WITH_BORDER = "searchUtil.highlightWithBorder"; public static final String STYLE_END = "</style>"; private SearchUtil() { } public static void processProjectConfigurables(Project project, Map<SearchableConfigurable, Set<OptionDescription>> options) { processConfigurables(ShowSettingsUtilImpl.getConfigurables(project, false), options); } private static void processConfigurables(Configurable[] configurables, Map<SearchableConfigurable, Set<OptionDescription>> options) { for (Configurable configurable : configurables) { if (configurable instanceof SearchableConfigurable) { Set<OptionDescription> configurableOptions = new TreeSet<>(); if (configurable instanceof Configurable.Composite) { final Configurable[] children = ((Configurable.Composite)configurable).getConfigurables(); processConfigurables(children, options); } //ignore invisible root nodes if (configurable instanceof SearchableConfigurable.Parent && !((SearchableConfigurable.Parent)configurable).isVisible()) { continue; } options.put((SearchableConfigurable)configurable, configurableOptions); if (configurable instanceof MasterDetails) { final MasterDetails md = (MasterDetails)configurable; md.initUi(); processComponent(configurable, configurableOptions, md.getMaster()); processComponent(configurable, configurableOptions, md.getDetails().getComponent()); } else { processComponent(configurable, configurableOptions, configurable.createComponent()); } } } } private static void processComponent(Configurable configurable, Set<OptionDescription> configurableOptions, JComponent component) { if (component != null) { processUILabel(configurable.getDisplayName(), configurableOptions, null); 
processComponent(component, configurableOptions, null); } } private static void processComponent(JComponent component, Set<OptionDescription> configurableOptions, String path) { if (component instanceof SkipSelfSearchComponent) return; final Border border = component.getBorder(); if (border instanceof TitledBorder) { final TitledBorder titledBorder = (TitledBorder)border; final String title = titledBorder.getTitle(); if (title != null) { processUILabel(title, configurableOptions, path); } } if (component instanceof JLabel) { final String label = ((JLabel)component).getText(); if (label != null) { processUILabel(label, configurableOptions, path); } } else if (component instanceof JCheckBox) { final String checkBoxTitle = ((JCheckBox)component).getText(); if (checkBoxTitle != null) { processUILabel(checkBoxTitle, configurableOptions, path); } } else if (component instanceof JRadioButton) { final String radioButtonTitle = ((JRadioButton)component).getText(); if (radioButtonTitle != null) { processUILabel(radioButtonTitle, configurableOptions, path); } } else if (component instanceof JButton) { final String buttonTitle = ((JButton)component).getText(); if (buttonTitle != null) { processUILabel(buttonTitle, configurableOptions, path); } } if (component instanceof JTabbedPane) { final JTabbedPane tabbedPane = (JTabbedPane)component; final int tabCount = tabbedPane.getTabCount(); for (int i = 0; i < tabCount; i++) { final String title = path != null ? path + '.' + tabbedPane.getTitleAt(i) : tabbedPane.getTitleAt(i); processUILabel(title, configurableOptions, title); final Component tabComponent = tabbedPane.getComponentAt(i); if (tabComponent instanceof JComponent) { processComponent((JComponent)tabComponent, configurableOptions, title); } } } else { final Component[] components = component.getComponents(); if (components != null) { for (Component child : components) { if (child instanceof JComponent) { processComponent((JComponent)child, configurableOptions, path); } } } } } private static void processUILabel(String title, Set<OptionDescription> configurableOptions, String path) { final Set<String> words = SearchableOptionsRegistrar.getInstance().getProcessedWordsWithoutStemming(title); final String regex = "[\\W&&[^\\p{Punct}\\p{Blank}]]"; for (String option : words) { configurableOptions.add(new OptionDescription(option, HTML_PATTERN.matcher(title).replaceAll(" ").replaceAll(regex, " "), path)); } } public static Runnable lightOptions(SearchableConfigurable configurable, JComponent component, String option, GlassPanel glassPanel) { return () -> { if (!traverseComponentsTree(configurable, glassPanel, component, option, true)) { traverseComponentsTree(configurable, glassPanel, component, option, false); } }; } private static int getSelection(String tabIdx, final JTabbedPane tabbedPane) { SearchableOptionsRegistrar searchableOptionsRegistrar = SearchableOptionsRegistrar.getInstance(); for (int i = 0; i < tabbedPane.getTabCount(); i++) { final Set<String> pathWords = searchableOptionsRegistrar.getProcessedWords(tabIdx); final String title = tabbedPane.getTitleAt(i); if (!pathWords.isEmpty()) { final Set<String> titleWords = searchableOptionsRegistrar.getProcessedWords(title); pathWords.removeAll(titleWords); if (pathWords.isEmpty()) return i; } else if (tabIdx.equalsIgnoreCase(title)) { //e.g. 
only stop words return i; } } return -1; } public static int getSelection(String tabIdx, final TabbedPaneWrapper tabbedPane) { SearchableOptionsRegistrar searchableOptionsRegistrar = SearchableOptionsRegistrar.getInstance(); for (int i = 0; i < tabbedPane.getTabCount(); i++) { final Set<String> pathWords = searchableOptionsRegistrar.getProcessedWords(tabIdx); final String title = tabbedPane.getTitleAt(i); final Set<String> titleWords = searchableOptionsRegistrar.getProcessedWords(title); pathWords.removeAll(titleWords); if (pathWords.isEmpty()) return i; } return -1; } private static boolean traverseComponentsTree(SearchableConfigurable configurable, GlassPanel glassPanel, JComponent rootComponent, String option, boolean force) { rootComponent.putClientProperty(HIGHLIGHT_WITH_BORDER, null); if (option == null || option.trim().length() == 0) return false; boolean highlight = false; if (rootComponent instanceof JCheckBox) { final JCheckBox checkBox = ((JCheckBox)rootComponent); if (isComponentHighlighted(checkBox.getText(), option, force, configurable)) { highlight = true; glassPanel.addSpotlight(checkBox); } } else if (rootComponent instanceof JRadioButton) { final JRadioButton radioButton = ((JRadioButton)rootComponent); if (isComponentHighlighted(radioButton.getText(), option, force, configurable)) { highlight = true; glassPanel.addSpotlight(radioButton); } } else if (rootComponent instanceof JLabel) { final JLabel label = ((JLabel)rootComponent); if (isComponentHighlighted(label.getText(), option, force, configurable)) { highlight = true; glassPanel.addSpotlight(label); } } else if (rootComponent instanceof JButton) { final JButton button = ((JButton)rootComponent); if (isComponentHighlighted(button.getText(), option, force, configurable)) { highlight = true; glassPanel.addSpotlight(button); } } else if (rootComponent instanceof JTabbedPane) { final JTabbedPane tabbedPane = (JTabbedPane)rootComponent; final String path = SearchableOptionsRegistrar.getInstance().getInnerPath(configurable, option); if (path != null) { final int index = getSelection(path, tabbedPane); if (index > -1 && index < tabbedPane.getTabCount()) { if (tabbedPane.getTabComponentAt(index) instanceof JComponent) { glassPanel.addSpotlight((JComponent)tabbedPane.getTabComponentAt(index)); } } } } else if (rootComponent instanceof TabbedPaneWrapper.TabbedPaneHolder) { final TabbedPaneWrapper tabbedPaneWrapper = ((TabbedPaneWrapper.TabbedPaneHolder)rootComponent).getTabbedPaneWrapper(); final String path = SearchableOptionsRegistrar.getInstance().getInnerPath(configurable, option); if (path != null) { final int index = getSelection(path, tabbedPaneWrapper); if (index > -1 && index < tabbedPaneWrapper.getTabCount()) { glassPanel.addSpotlight((JComponent)tabbedPaneWrapper.getTabComponentAt(index)); } } } final Component[] components = rootComponent.getComponents(); for (Component component : components) { if (component instanceof JComponent) { final boolean innerHighlight = traverseComponentsTree(configurable, glassPanel, (JComponent)component, option, force); if (!highlight && !innerHighlight) { final Border border = rootComponent.getBorder(); if (border instanceof TitledBorder) { final String title = ((TitledBorder)border).getTitle(); if (isComponentHighlighted(title, option, force, configurable)) { highlight = true; glassPanel.addSpotlight(rootComponent); rootComponent.putClientProperty(HIGHLIGHT_WITH_BORDER, Boolean.TRUE); } } } if (innerHighlight) { highlight = true; } } } return highlight; } public static boolean 
isComponentHighlighted(String text, String option, boolean force, final SearchableConfigurable configurable) { if (text == null || option == null || option.length() == 0) return false; final SearchableOptionsRegistrar searchableOptionsRegistrar = SearchableOptionsRegistrar.getInstance(); final Set<String> words = searchableOptionsRegistrar.getProcessedWords(option); final Set<String> options = configurable != null ? searchableOptionsRegistrar.replaceSynonyms(words, configurable) : words; if (options == null || options.isEmpty()) { return text.toLowerCase(Locale.US).contains(option.toLowerCase(Locale.US)); } final Set<String> tokens = searchableOptionsRegistrar.getProcessedWords(text); if (!force) { options.retainAll(tokens); final boolean highlight = !options.isEmpty(); return highlight || text.toLowerCase(Locale.US).contains(option.toLowerCase(Locale.US)); } else { options.removeAll(tokens); return options.isEmpty(); } } public static String markup(@NotNull String textToMarkup, @Nullable String filter) { if (filter == null || filter.length() == 0) { return textToMarkup; } int bodyStart = textToMarkup.indexOf("<body>"); final int bodyEnd = textToMarkup.indexOf("</body>"); final String head; final String foot; if (bodyStart >= 0) { bodyStart += "<body>".length(); head = textToMarkup.substring(0, bodyStart); if (bodyEnd >= 0) { foot = textToMarkup.substring(bodyEnd); } else { foot = ""; } textToMarkup = textToMarkup.substring(bodyStart, bodyEnd); } else { foot = ""; head = ""; } final Pattern insideHtmlTagPattern = Pattern.compile("[<[^<>]*>]*<[^<>]*"); final SearchableOptionsRegistrar registrar = SearchableOptionsRegistrar.getInstance(); final HashSet<String> quoted = new HashSet<>(); filter = processFilter(quoteStrictOccurrences(textToMarkup, filter), quoted); final Set<String> options = registrar.getProcessedWords(filter); final Set<String> words = registrar.getProcessedWords(textToMarkup); for (String option : options) { if (words.contains(option)) { textToMarkup = markup(textToMarkup, insideHtmlTagPattern, option); } } for (String stripped : quoted) { if (registrar.isStopWord(stripped)) continue; textToMarkup = markup(textToMarkup, insideHtmlTagPattern, stripped); } return head + textToMarkup + foot; } private static String quoteStrictOccurrences(final String textToMarkup, final String filter) { String cur = ""; final String s = textToMarkup.toLowerCase(Locale.US); for (String part : filter.split(" ")) { if (s.contains(part)) { cur += "\"" + part + "\" "; } else { cur += part + " "; } } return cur; } private static String markup(String textToMarkup, final Pattern insideHtmlTagPattern, final String option) { final int styleIdx = textToMarkup.indexOf("<style"); final int styleEndIdx = textToMarkup.indexOf("</style>"); if (styleIdx < 0 || styleEndIdx < 0) { return markupInText(textToMarkup, insideHtmlTagPattern, option); } return markup(textToMarkup.substring(0, styleIdx), insideHtmlTagPattern, option) + markup(textToMarkup.substring(styleEndIdx + STYLE_END.length()), insideHtmlTagPattern, option); } private static String markupInText(String textToMarkup, Pattern insideHtmlTagPattern, String option) { String result = ""; int beg = 0; int idx; while ((idx = StringUtil.indexOfIgnoreCase(textToMarkup, option, beg)) != -1) { final String prefix = textToMarkup.substring(beg, idx); final String toMark = textToMarkup.substring(idx, idx + option.length()); if (insideHtmlTagPattern.matcher(prefix).matches()) { final int lastIdx = textToMarkup.indexOf(">", idx); result += prefix + 
textToMarkup.substring(idx, lastIdx + 1); beg = lastIdx + 1; } else { result += prefix + "<font color='#ffffff' bgColor='#1d5da7'>" + toMark + "</font>"; beg = idx + option.length(); } } result += textToMarkup.substring(beg); return result; } public static void appendFragments(String filter, String text, @SimpleTextAttributes.StyleAttributeConstant int style, final Color foreground, final Color background, final SimpleColoredComponent textRenderer) { if (text == null) return; if (filter == null || filter.length() == 0) { textRenderer.append(text, new SimpleTextAttributes(background, foreground, JBColor.RED, style)); } else { //markup final HashSet<String> quoted = new HashSet<>(); filter = processFilter(quoteStrictOccurrences(text, filter), quoted); final TreeMap<Integer, String> indx = new TreeMap<>(); for (String stripped : quoted) { int beg = 0; int idx; while ((idx = StringUtil.indexOfIgnoreCase(text, stripped, beg)) != -1) { indx.put(idx, text.substring(idx, idx + stripped.length())); beg = idx + stripped.length(); } } final List<String> selectedWords = new ArrayList<>(); int pos = 0; for (Integer index : indx.keySet()) { final String stripped = indx.get(index); final int start = index.intValue(); if (pos > start) { final String highlighted = selectedWords.get(selectedWords.size() - 1); if (highlighted.length() < stripped.length()) { selectedWords.remove(highlighted); } else { continue; } } appendSelectedWords(text, selectedWords, pos, start, filter); selectedWords.add(stripped); pos = start + stripped.length(); } appendSelectedWords(text, selectedWords, pos, text.length(), filter); int idx = 0; for (String word : selectedWords) { text = text.substring(idx); final String before = text.substring(0, text.indexOf(word)); if (before.length() > 0) textRenderer.append(before, new SimpleTextAttributes(background, foreground, null, style)); idx = text.indexOf(word) + word.length(); textRenderer.append(text.substring(idx - word.length(), idx), new SimpleTextAttributes(background, foreground, null, style | SimpleTextAttributes.STYLE_SEARCH_MATCH)); } final String after = text.substring(idx, text.length()); if (after.length() > 0) textRenderer.append(after, new SimpleTextAttributes(background, foreground, null, style)); } } private static void appendSelectedWords(final String text, final List<String> selectedWords, final int pos, int end, final String filter) { if (pos < end) { final Set<String> filters = SearchableOptionsRegistrar.getInstance().getProcessedWords(filter); final String[] words = text.substring(pos, end).split("[\\W&&[^-]]+"); for (String word : words) { if (filters.contains(PorterStemmerUtil.stem(word.toLowerCase(Locale.US)))) { selectedWords.add(word); } } } } public static List<Set<String>> findKeys(String filter, Set<String> quoted) { filter = processFilter(filter.toLowerCase(Locale.US), quoted); final List<Set<String>> keySetList = new ArrayList<>(); final SearchableOptionsRegistrar optionsRegistrar = SearchableOptionsRegistrar.getInstance(); final Set<String> words = optionsRegistrar.getProcessedWords(filter); for (String word : words) { final Set<OptionDescription> descriptions = ((SearchableOptionsRegistrarImpl)optionsRegistrar).getAcceptableDescriptions(word); Set<String> keySet = new HashSet<>(); if (descriptions != null) { for (OptionDescription description : descriptions) { keySet.add(description.getPath()); } } keySetList.add(keySet); } if (keySetList.isEmpty() && !StringUtil.isEmptyOrSpaces(filter)) { keySetList.add(Collections.singleton(filter)); } return 
keySetList; } public static String processFilter(String filter, Set<String> quoted) { String withoutQuoted = ""; int beg = 0; final Matcher matcher = QUOTED.matcher(filter); while (matcher.find()) { final int start = matcher.start(1); withoutQuoted += " " + filter.substring(beg, start); beg = matcher.end(1); final String trimmed = filter.substring(start, beg).trim(); if (trimmed.length() > 0) { quoted.add(trimmed); } } return withoutQuoted + " " + filter.substring(beg); } public static List<Configurable> expand(ConfigurableGroup[] groups) { final ArrayList<Configurable> result = new ArrayList<>(); for (ConfigurableGroup eachGroup : groups) { result.addAll(expandGroup(eachGroup)); } return result; } public static List<Configurable> expandGroup(final ConfigurableGroup group) { final Configurable[] configurables = group.getConfigurables(); List<Configurable> result = new ArrayList<>(); ContainerUtil.addAll(result, configurables); for (Configurable each : configurables) { addChildren(each, result); } return ContainerUtil.filter(result, configurable -> !(configurable instanceof SearchableConfigurable.Parent) || ((SearchableConfigurable.Parent)configurable).isVisible()); } private static void addChildren(Configurable configurable, List<Configurable> list) { if (configurable instanceof Configurable.Composite) { final Configurable[] kids = ((Configurable.Composite)configurable).getConfigurables(); for (Configurable eachKid : kids) { list.add(eachKid); addChildren(eachKid, list); } } } }
assign paths for jbTabs titles (IDEA-163178)
platform/platform-impl/src/com/intellij/ide/ui/search/SearchUtil.java
assign paths for jbTabs titles (IDEA-163178)
<ide><path>platform/platform-impl/src/com/intellij/ide/ui/search/SearchUtil.java
<ide> import com.intellij.ui.SimpleColoredComponent;
<ide> import com.intellij.ui.SimpleTextAttributes;
<ide> import com.intellij.ui.TabbedPaneWrapper;
<add>import com.intellij.ui.tabs.JBTabs;
<add>import com.intellij.ui.tabs.TabInfo;
<ide> import com.intellij.util.containers.ContainerUtil;
<ide> import org.jetbrains.annotations.NotNull;
<ide> import org.jetbrains.annotations.Nullable;
<ide>         final Component tabComponent = tabbedPane.getComponentAt(i);
<ide>         if (tabComponent instanceof JComponent) {
<ide>           processComponent((JComponent)tabComponent, configurableOptions, title);
<add>        }
<add>      }
<add>    }
<add>    else if (component instanceof JBTabs) {
<add>      final JBTabs tabbedPane = (JBTabs)component;
<add>      final int tabCount = tabbedPane.getTabCount();
<add>      for (int i = 0; i < tabCount; i++) {
<add>        TabInfo tabInfo = tabbedPane.getTabAt(i);
<add>        String tabTitle = tabInfo.getText();
<add>        final String title = path != null ? path + '.' + tabTitle : tabTitle;
<add>        processUILabel(title, configurableOptions, title);
<add>        final JComponent tabComponent = tabInfo.getComponent();
<add>        if (tabComponent != null) {
<add>          processComponent(tabComponent, configurableOptions, title);
<ide>         }
<ide>       }
<ide>     }
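The commit above extends the settings-search traversal so that JBTabs panes are indexed the same way Swing JTabbedPane instances already are: each tab title is joined to the parent path with a dot, registered as a searchable label, and the tab's component is then traversed recursively. The following stand-alone sketch mirrors that pattern using plain Swing only (JTabbedPane); TabPathSketch, walkTabs and the collected list are illustrative names, not part of the record above.

    import java.awt.Component;
    import java.util.ArrayList;
    import java.util.List;
    import javax.swing.JComponent;
    import javax.swing.JTabbedPane;

    public class TabPathSketch {
      // Walks a component tree and records "parent.tab" titles for every tab,
      // mirroring how processComponent() builds dotted search paths for tabbed panes.
      static void walkTabs(JComponent component, String path, List<String> collected) {
        if (component instanceof JTabbedPane) {
          JTabbedPane tabbedPane = (JTabbedPane) component;
          for (int i = 0; i < tabbedPane.getTabCount(); i++) {
            String title = path != null ? path + '.' + tabbedPane.getTitleAt(i) : tabbedPane.getTitleAt(i);
            collected.add(title); // stands in for processUILabel(title, configurableOptions, title)
            Component tab = tabbedPane.getComponentAt(i);
            if (tab instanceof JComponent) {
              walkTabs((JComponent) tab, title, collected);
            }
          }
        } else {
          for (Component child : component.getComponents()) {
            if (child instanceof JComponent) {
              walkTabs((JComponent) child, path, collected);
            }
          }
        }
      }

      public static void main(String[] args) {
        JTabbedPane inner = new JTabbedPane();
        inner.addTab("Inner", new javax.swing.JPanel());
        JTabbedPane outer = new JTabbedPane();
        outer.addTab("Outer", inner);
        List<String> titles = new ArrayList<>();
        walkTabs(outer, null, titles);
        System.out.println(titles); // [Outer, Outer.Inner]
      }
    }

The dotted titles collected this way correspond to the inner paths the indexer stores via processUILabel and later matches in getSelection().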
Java
apache-2.0
8d125d9c4101a04b779326d537cbd45861f2293e
0
andrhamm/Singularity,hs-jenkins-bot/Singularity,andrhamm/Singularity,hs-jenkins-bot/Singularity,grepsr/Singularity,grepsr/Singularity,hs-jenkins-bot/Singularity,grepsr/Singularity,HubSpot/Singularity,andrhamm/Singularity,HubSpot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,andrhamm/Singularity,grepsr/Singularity,HubSpot/Singularity,andrhamm/Singularity,hs-jenkins-bot/Singularity,grepsr/Singularity,grepsr/Singularity,HubSpot/Singularity
package com.hubspot.singularity; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import javax.annotation.Nonnull; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Optional; import com.google.common.base.Predicate; import com.hubspot.mesos.JavaUtils; public class SingularityPendingTask { private final SingularityPendingTaskId pendingTaskId; private final List<String> cmdLineArgsList; private final Optional<String> user; private final Optional<String> runId; private final Map<SlaveMatchState, List<String>> unmatchedOffers; public static Predicate<SingularityPendingTask> matchingRequest(final String requestId) { return new Predicate<SingularityPendingTask>() { @Override public boolean apply(@Nonnull SingularityPendingTask input) { return input.getPendingTaskId().getRequestId().equals(requestId); } }; } public static Predicate<SingularityPendingTask> matchingDeploy(final String deployId) { return new Predicate<SingularityPendingTask>() { @Override public boolean apply(@Nonnull SingularityPendingTask input) { return input.getPendingTaskId().getDeployId().equals(deployId); } }; } public SingularityPendingTask(SingularityPendingTaskId pendingTaskId, List<String> cmdLineArgsList, Optional<String> user, Optional<String> runId) { this(pendingTaskId, cmdLineArgsList, user, runId, new HashMap<SlaveMatchState, List<String>>()); } @JsonCreator public SingularityPendingTask(@JsonProperty("pendingTaskId") SingularityPendingTaskId pendingTaskId, @JsonProperty("cmdLineArgsList") List<String> cmdLineArgsList, @JsonProperty("user") Optional<String> user, @JsonProperty("runId") Optional<String> runId, @JsonProperty("unmatchedOffers") Map<SlaveMatchState, List<String>> unmatchedOffers) { this.pendingTaskId = pendingTaskId; this.user = user; this.cmdLineArgsList = JavaUtils.nonNullImmutable(cmdLineArgsList); this.runId = runId; this.unmatchedOffers = unmatchedOffers == null ? new HashMap<SlaveMatchState, List<String>>() : unmatchedOffers; } @Override public int hashCode() { return Objects.hashCode(pendingTaskId); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } SingularityPendingTask other = (SingularityPendingTask) obj; return Objects.equals(pendingTaskId, other.getPendingTaskId()); } public Optional<String> getUser() { return user; } public SingularityPendingTaskId getPendingTaskId() { return pendingTaskId; } public List<String> getCmdLineArgsList() { return cmdLineArgsList; } public Optional<String> getRunId() { return runId; } public Map<SlaveMatchState, List<String>> getUnmatchedOffers() { return unmatchedOffers; } public void addUnmatchedOffer(String host, SlaveMatchState reason) { if (unmatchedOffers.containsKey(reason)) { unmatchedOffers.get(reason).add(host); } else { unmatchedOffers.put(reason, Arrays.asList(host)); } } public void clearUnmatchedOffers() { unmatchedOffers.clear(); } @Override public String toString() { return "SingularityPendingTask [pendingTaskId=" + pendingTaskId + ", cmdLineArgsList=" + cmdLineArgsList + ", user=" + user + ", runId=" + runId + ", unmatchedOffers=" + unmatchedOffers + "]"; } }
SingularityBase/src/main/java/com/hubspot/singularity/SingularityPendingTask.java
package com.hubspot.singularity; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import javax.annotation.Nonnull; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Optional; import com.google.common.base.Predicate; import com.hubspot.mesos.JavaUtils; public class SingularityPendingTask { private final SingularityPendingTaskId pendingTaskId; private final List<String> cmdLineArgsList; private final Optional<String> user; private final Optional<String> runId; private final Map<String, SlaveMatchState> unmatchedOffers; public static Predicate<SingularityPendingTask> matchingRequest(final String requestId) { return new Predicate<SingularityPendingTask>() { @Override public boolean apply(@Nonnull SingularityPendingTask input) { return input.getPendingTaskId().getRequestId().equals(requestId); } }; } public static Predicate<SingularityPendingTask> matchingDeploy(final String deployId) { return new Predicate<SingularityPendingTask>() { @Override public boolean apply(@Nonnull SingularityPendingTask input) { return input.getPendingTaskId().getDeployId().equals(deployId); } }; } @JsonCreator public SingularityPendingTask(@JsonProperty("pendingTaskId") SingularityPendingTaskId pendingTaskId, @JsonProperty("cmdLineArgsList") List<String> cmdLineArgsList, @JsonProperty("user") Optional<String> user, @JsonProperty("runId") Optional<String> runId, @JsonProperty("unmatchedOffers") Map<String, SlaveMatchState> unmatchedOffers) { this.pendingTaskId = pendingTaskId; this.user = user; this.cmdLineArgsList = JavaUtils.nonNullImmutable(cmdLineArgsList); this.runId = runId; this.unmatchedOffers = unmatchedOffers == null ? new HashMap<String, SlaveMatchState>() : unmatchedOffers; } @Override public int hashCode() { return Objects.hashCode(pendingTaskId); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } SingularityPendingTask other = (SingularityPendingTask) obj; return Objects.equals(pendingTaskId, other.getPendingTaskId()); } public Optional<String> getUser() { return user; } public SingularityPendingTaskId getPendingTaskId() { return pendingTaskId; } public List<String> getCmdLineArgsList() { return cmdLineArgsList; } public Optional<String> getRunId() { return runId; } public Map<String, SlaveMatchState> getUnmatchedOffers() { return unmatchedOffers; } public void addUnmatchedOffer(String host, SlaveMatchState reason) { unmatchedOffers.put(host, reason); } public void clearUnmatchedOffers() { unmatchedOffers.clear(); } @Override public String toString() { return "SingularityPendingTask [pendingTaskId=" + pendingTaskId + ", cmdLineArgsList=" + cmdLineArgsList + ", user=" + user + ", runId=" + runId + ", unmatchedOffers=" + unmatchedOffers + "]"; } }
save list of hosts under each slave match state
SingularityBase/src/main/java/com/hubspot/singularity/SingularityPendingTask.java
save list of hosts under each slave match state
<ide><path>SingularityBase/src/main/java/com/hubspot/singularity/SingularityPendingTask.java
<ide> package com.hubspot.singularity;
<ide>
<add>import java.util.Arrays;
<ide> import java.util.HashMap;
<ide> import java.util.List;
<ide> import java.util.Map;
<ide>   private final List<String> cmdLineArgsList;
<ide>   private final Optional<String> user;
<ide>   private final Optional<String> runId;
<del>  private final Map<String, SlaveMatchState> unmatchedOffers;
<add>  private final Map<SlaveMatchState, List<String>> unmatchedOffers;
<ide>
<ide>   public static Predicate<SingularityPendingTask> matchingRequest(final String requestId) {
<ide>     return new Predicate<SingularityPendingTask>() {
<ide>     };
<ide>   }
<ide>
<add>  public SingularityPendingTask(SingularityPendingTaskId pendingTaskId, List<String> cmdLineArgsList, Optional<String> user, Optional<String> runId) {
<add>    this(pendingTaskId, cmdLineArgsList, user, runId, new HashMap<SlaveMatchState, List<String>>());
<add>  }
<add>
<ide>   @JsonCreator
<ide>   public SingularityPendingTask(@JsonProperty("pendingTaskId") SingularityPendingTaskId pendingTaskId, @JsonProperty("cmdLineArgsList") List<String> cmdLineArgsList,
<del>      @JsonProperty("user") Optional<String> user, @JsonProperty("runId") Optional<String> runId, @JsonProperty("unmatchedOffers") Map<String, SlaveMatchState> unmatchedOffers) {
<add>      @JsonProperty("user") Optional<String> user, @JsonProperty("runId") Optional<String> runId, @JsonProperty("unmatchedOffers") Map<SlaveMatchState, List<String>> unmatchedOffers) {
<ide>     this.pendingTaskId = pendingTaskId;
<ide>     this.user = user;
<ide>     this.cmdLineArgsList = JavaUtils.nonNullImmutable(cmdLineArgsList);
<ide>     this.runId = runId;
<del>    this.unmatchedOffers = unmatchedOffers == null ? new HashMap<String, SlaveMatchState>() : unmatchedOffers;
<add>    this.unmatchedOffers = unmatchedOffers == null ? new HashMap<SlaveMatchState, List<String>>() : unmatchedOffers;
<ide>   }
<ide>
<ide>   @Override
<ide>     return runId;
<ide>   }
<ide>
<del>  public Map<String, SlaveMatchState> getUnmatchedOffers() {
<add>  public Map<SlaveMatchState, List<String>> getUnmatchedOffers() {
<ide>     return unmatchedOffers;
<ide>   }
<ide>
<ide>   public void addUnmatchedOffer(String host, SlaveMatchState reason) {
<del>    unmatchedOffers.put(host, reason);
<add>    if (unmatchedOffers.containsKey(reason)) {
<add>      unmatchedOffers.get(reason).add(host);
<add>    } else {
<add>      unmatchedOffers.put(reason, Arrays.asList(host));
<add>    }
<ide>   }
<ide>
<ide>   public void clearUnmatchedOffers() {
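The diff above inverts unmatchedOffers from a host-to-reason map into a reason-to-hosts map, so several rejected hosts can be recorded under one SlaveMatchState. One caveat worth noting: Arrays.asList(host) returns a fixed-size list, so the add() in the containsKey branch would throw UnsupportedOperationException the second time the same reason is seen; a mutable list avoids that. Below is a self-contained sketch of the grouping pattern with that adjustment; MatchState is a stand-in enum and the class name is illustrative, neither is part of the Singularity code.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class UnmatchedOfferGrouping {
      enum MatchState { RESOURCES_DO_NOT_MATCH, OK } // stand-in for SlaveMatchState

      private final Map<MatchState, List<String>> unmatchedOffers = new HashMap<>();

      // Group host names under the reason their offer was rejected,
      // using a mutable ArrayList so repeated reasons keep accumulating hosts.
      void addUnmatchedOffer(String host, MatchState reason) {
        unmatchedOffers.computeIfAbsent(reason, k -> new ArrayList<>()).add(host);
      }

      public static void main(String[] args) {
        UnmatchedOfferGrouping g = new UnmatchedOfferGrouping();
        g.addUnmatchedOffer("slave-a", MatchState.RESOURCES_DO_NOT_MATCH);
        g.addUnmatchedOffer("slave-b", MatchState.RESOURCES_DO_NOT_MATCH); // second host, same reason
        System.out.println(g.unmatchedOffers); // {RESOURCES_DO_NOT_MATCH=[slave-a, slave-b]}
      }
    }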
Java
mit
80054051846ab59365defd79d63c4cfc1533f30d
0
robert-schmidtke/flink-xtreemfs-datalocality-test
package org.xtreemfs.flink; import java.io.BufferedOutputStream; import java.io.DataOutputStream; import java.io.File; import java.io.FileOutputStream; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.api.common.typeinfo.BasicTypeInfo; import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.aggregation.Aggregations; import org.apache.flink.api.java.io.TypeSerializerInputFormat; import org.apache.flink.api.java.tuple.Tuple3; public class DataLocalityTest { public static void main(String[] args) throws Exception { final ExecutionEnvironment env = ExecutionEnvironment .getExecutionEnvironment(); if (args.length != 1) { System.err .println("Invoke with one positional parameter: the number of OSDs."); System.exit(1); } int osdCount = 0; try { osdCount = Integer.parseInt(args[0]); } catch (NumberFormatException e) { System.err.println("Bad number of OSD argument: " + args[0]); System.exit(1); } final String workingDirectory = System.getenv("WORK"); if (workingDirectory == null) { System.err .println("$WORK must point to an XtreemFS volume mount point (as a file system path)."); System.exit(1); } final String defaultVolume = System.getenv("DEFAULT_VOLUME"); if (defaultVolume == null) { System.err .println("$DEFAULT_VOLUME must point to an XtreemFS volume URL ('xtreemfs://hostname:port/volume')."); System.exit(1); } // Generate enough data to distribute among the OSDs. DataOutputStream out = new DataOutputStream(new BufferedOutputStream( new FileOutputStream(workingDirectory + "/data.bin"))); // Each entry is 8 bytes and we want 128 kilobytes per OSD. for (int i = 0; i < osdCount * 128 * 1024 / 8; ++i) { // Always write the same value to each OSD. out.writeLong((i / (128 * 1024 / 8)) % osdCount); } out.close(); // Use words as input to Flink wordcount Job. DataSet<Long> input = env.readFile(new TypeSerializerInputFormat<Long>( BasicTypeInfo.LONG_TYPE_INFO), workingDirectory + "/data.bin"); DataSet<Long> filtered = input; // .filter(new FilterFunction<Long>() { // // private static final long serialVersionUID = -7778608339455035028L; // // @Override // public boolean filter(Long arg0) throws Exception { // return arg0 % 2 == 0; // } // // }); DataSet<Tuple3<Long, Integer, String>> counts = filtered .map(new MapFunction<Long, Tuple3<Long, Integer, String>>() { private static final long serialVersionUID = 7917635531979595929L; @Override public Tuple3<Long, Integer, String> map(Long arg0) throws Exception { return new Tuple3<Long, Integer, String>(arg0, 1, System.getenv("HOSTNAME")); } }).groupBy(2).aggregate(Aggregations.SUM, 1) .aggregate(Aggregations.MAX, 0); counts.print(); File file = new File(workingDirectory + "/data.bin"); System.out.println(file.length() + " bytes"); file.delete(); } }
datalocality/src/main/java/org/xtreemfs/flink/DataLocalityTest.java
package org.xtreemfs.flink; import java.io.BufferedOutputStream; import java.io.DataOutputStream; import java.io.File; import java.io.FileOutputStream; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.api.java.DataSet; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.aggregation.Aggregations; import org.apache.flink.api.java.tuple.Tuple3; public class DataLocalityTest { public static void main(String[] args) throws Exception { final ExecutionEnvironment env = ExecutionEnvironment .getExecutionEnvironment(); if (args.length != 1) { System.err .println("Invoke with one positional parameter: the number of OSDs."); System.exit(1); } int osdCount = 0; try { osdCount = Integer.parseInt(args[0]); } catch (NumberFormatException e) { System.err.println("Bad number of OSD argument: " + args[0]); System.exit(1); } final String workingDirectory = System.getenv("WORK"); if (workingDirectory == null) { System.err .println("$WORK must point to an XtreemFS volume mount point (as a file system path)."); System.exit(1); } final String defaultVolume = System.getenv("DEFAULT_VOLUME"); if (defaultVolume == null) { System.err .println("$DEFAULT_VOLUME must point to an XtreemFS volume URL ('xtreemfs://hostname:port/volume')."); System.exit(1); } // Generate enough data to distribute among the OSDs. DataOutputStream out = new DataOutputStream(new BufferedOutputStream( new FileOutputStream(workingDirectory + "/data.bin"))); // Each entry is 8 bytes and we want 128 kilobytes per OSD. for (int i = 0; i < osdCount * 128 * 1024 / 8; ++i) { // Always write the same value to each OSD. out.writeLong((i / (128 * 1024 / 8)) % osdCount); } out.close(); // Use words as input to Flink wordcount Job. DataSet<Long> input = env.readFileOfPrimitives(workingDirectory + "/data.bin", Long.class); DataSet<Long> filtered = input; // .filter(new FilterFunction<Long>() { // // private static final long serialVersionUID = -7778608339455035028L; // // @Override // public boolean filter(Long arg0) throws Exception { // return arg0 % 2 == 0; // } // // }); DataSet<Tuple3<Long, Integer, String>> counts = filtered .map(new MapFunction<Long, Tuple3<Long, Integer, String>>() { private static final long serialVersionUID = 7917635531979595929L; @Override public Tuple3<Long, Integer, String> map(Long arg0) throws Exception { return new Tuple3<Long, Integer, String>(arg0, 1, System.getenv("HOSTNAME")); } }).groupBy(2).aggregate(Aggregations.MAX, 1) .aggregate(Aggregations.MAX, 0); counts.print(); File file = new File(workingDirectory + "/data.bin"); System.out.println(file.length() + " bytes"); file.delete(); } }
back to custom input format, summing the counts
datalocality/src/main/java/org/xtreemfs/flink/DataLocalityTest.java
back to custom input format, summing the counts
<ide><path>datalocality/src/main/java/org/xtreemfs/flink/DataLocalityTest.java
<ide> import java.io.FileOutputStream;
<ide>
<ide> import org.apache.flink.api.common.functions.MapFunction;
<add>import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
<ide> import org.apache.flink.api.java.DataSet;
<ide> import org.apache.flink.api.java.ExecutionEnvironment;
<ide> import org.apache.flink.api.java.aggregation.Aggregations;
<add>import org.apache.flink.api.java.io.TypeSerializerInputFormat;
<ide> import org.apache.flink.api.java.tuple.Tuple3;
<ide>
<ide> public class DataLocalityTest {
<ide>     out.close();
<ide>
<ide>     // Use words as input to Flink wordcount Job.
<del>    DataSet<Long> input = env.readFileOfPrimitives(workingDirectory
<del>        + "/data.bin", Long.class);
<add>    DataSet<Long> input = env.readFile(new TypeSerializerInputFormat<Long>(
<add>        BasicTypeInfo.LONG_TYPE_INFO), workingDirectory + "/data.bin");
<ide>
<ide>     DataSet<Long> filtered = input;
<ide>     // .filter(new FilterFunction<Long>() {
<ide>             System.getenv("HOSTNAME"));
<ide>       }
<ide>
<del>    }).groupBy(2).aggregate(Aggregations.MAX, 1)
<add>    }).groupBy(2).aggregate(Aggregations.SUM, 1)
<ide>         .aggregate(Aggregations.MAX, 0);
<ide>
<ide> counts.print();
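In effect, the job above tags every record with the worker's HOSTNAME, groups by that host field, and, after this commit, sums the per-record 1s, so the second tuple field becomes a per-host record count rather than a maximum. The sketch below reproduces that reduction over an in-memory list in plain Java (no Flink dependency); all names are illustrative.

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class PerHostCountSketch {
      public static void main(String[] args) {
        // One entry per processed record, tagged with the worker's hostname,
        // mirroring the Tuple3<Long, Integer, String> mapper in the job above.
        List<String> recordHosts = Arrays.asList("node-1", "node-1", "node-2");

        // groupBy(2).aggregate(Aggregations.SUM, 1): summing the per-record 1s
        // per host is simply a per-host record count.
        Map<String, Integer> perHost = new HashMap<>();
        for (String host : recordHosts) {
          perHost.merge(host, 1, Integer::sum);
        }

        System.out.println(perHost); // e.g. {node-1=2, node-2=1}
      }
    }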
Java
epl-1.0
61b0e1d35c8de9ded79be6867ed4f63f380c4639
0
DavidGutknecht/elexis-3-base,DavidGutknecht/elexis-3-base,DavidGutknecht/elexis-3-base,DavidGutknecht/elexis-3-base,DavidGutknecht/elexis-3-base
/******************************************************************************* * Copyright (c) 2007-2010, G. Weirich and Elexis * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * G. Weirich - initial API and implementation ******************************************************************************/ package ch.elexis.laborimport.hl7.universal; import java.io.File; import java.io.FilenameFilter; import java.util.Arrays; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IAction; import ch.elexis.core.data.activator.CoreHub; import ch.elexis.core.data.interfaces.events.MessageEvent; import ch.elexis.core.data.util.ResultAdapter; import ch.elexis.core.importer.div.importers.DefaultPersistenceHandler; import ch.elexis.core.importer.div.importers.HL7Parser; import ch.elexis.core.importer.div.importers.multifile.MultiFileParser; import ch.elexis.core.ui.icons.Images; import ch.elexis.core.ui.importer.div.importers.DefaultHL7Parser; import ch.elexis.core.ui.importer.div.importers.multifile.strategy.DefaultImportStrategyFactory; import ch.elexis.core.ui.util.SWTHelper; import ch.rgw.tools.Result; public class Importer extends Action implements IAction { public static final String MY_LAB = "Eigenlabor"; private MultiFileParser mfParser = new MultiFileParser(MY_LAB); private HL7Parser hlp = new DefaultHL7Parser(MY_LAB); public Importer(){ super("Hl7 Datei", Images.IMG_IMPORT.getImageDescriptor()); } @Override public void run(){ if (CoreHub.localCfg.get(Preferences.CFG_DIRECTORY_AUTOIMPORT, false)) { MessageEvent.fireInformation("HL7 Import", "Automatischer Import ist aktiviert."); return; } File dir = new File(CoreHub.localCfg.get(Preferences.CFG_DIRECTORY, File.separator)); if ((!dir.exists()) || (!dir.isDirectory())) { SWTHelper.showError("bad directory for import", "Konfigurationsfehler", "Das Transferverzeichnis ist nicht korrekt eingestellt"); } else { int err = 0; int files = 0; Result<?> r = null; String[] fileNames = dir.list(new FilenameFilter() { public boolean accept(File arg0, String arg1){ if (arg1.toLowerCase().endsWith(".hl7")) { return true; } return false; } }); Arrays.sort(fileNames); for (String fn : fileNames) { files++; File hl7file = new File(dir, fn); r = mfParser.importFromFile(hl7file, new DefaultImportStrategyFactory().setMoveAfterImport(true) .setLabContactResolver(new LinkLabContactResolver()), hlp, new DefaultPersistenceHandler()); } if (err > 0) { if (r != null) { ResultAdapter.displayResult(r, Integer.toString(err) + " von " + Integer.toString(files) + " Dateien hatten Fehler\n"); } else { SWTHelper.showError("HL7 Import Fehler", "Die Dateien aus dem Transferverzeichnis konnten nicht importiert werden."); } } else if (files == 0) { SWTHelper.showInfo("Laborimport", "Es waren keine Dateien zum Import vorhanden"); } else { SWTHelper.showInfo("Laborimport", Integer.toString(files) + " Dateien wurden fehlerfrei verarbeitet."); } } } }
bundles/ch.elexis.laborimport.hl7.allg/src/ch/elexis/laborimport/hl7/universal/Importer.java
/******************************************************************************* * Copyright (c) 2007-2010, G. Weirich and Elexis * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * G. Weirich - initial API and implementation ******************************************************************************/ package ch.elexis.laborimport.hl7.universal; import java.io.File; import java.io.FilenameFilter; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IAction; import ch.elexis.core.data.activator.CoreHub; import ch.elexis.core.data.interfaces.events.MessageEvent; import ch.elexis.core.data.util.ResultAdapter; import ch.elexis.core.importer.div.importers.DefaultPersistenceHandler; import ch.elexis.core.importer.div.importers.HL7Parser; import ch.elexis.core.importer.div.importers.multifile.MultiFileParser; import ch.elexis.core.ui.icons.Images; import ch.elexis.core.ui.importer.div.importers.DefaultHL7Parser; import ch.elexis.core.ui.importer.div.importers.multifile.strategy.DefaultImportStrategyFactory; import ch.elexis.core.ui.util.SWTHelper; import ch.rgw.tools.Result; public class Importer extends Action implements IAction { public static final String MY_LAB = "Eigenlabor"; private MultiFileParser mfParser = new MultiFileParser(MY_LAB); private HL7Parser hlp = new DefaultHL7Parser(MY_LAB); public Importer(){ super("Hl7 Datei", Images.IMG_IMPORT.getImageDescriptor()); } @Override public void run(){ if (CoreHub.localCfg.get(Preferences.CFG_DIRECTORY_AUTOIMPORT, false)) { MessageEvent.fireInformation("HL7 Import", "Automatischer Import ist aktiviert."); return; } File dir = new File(CoreHub.localCfg.get(Preferences.CFG_DIRECTORY, File.separator)); if ((!dir.exists()) || (!dir.isDirectory())) { SWTHelper.showError("bad directory for import", "Konfigurationsfehler", "Das Transferverzeichnis ist nicht korrekt eingestellt"); } else { int err = 0; int files = 0; Result<?> r = null; for (String fn : dir.list(new FilenameFilter() { public boolean accept(File arg0, String arg1){ if (arg1.toLowerCase().endsWith(".hl7")) { return true; } return false; } })) { files++; File hl7file = new File(dir, fn); r = mfParser.importFromFile(hl7file, new DefaultImportStrategyFactory().setMoveAfterImport(true) .setLabContactResolver(new LinkLabContactResolver()), hlp, new DefaultPersistenceHandler()); } if (err > 0) { if (r != null) { ResultAdapter.displayResult(r, Integer.toString(err) + " von " + Integer.toString(files) + " Dateien hatten Fehler\n"); } else { SWTHelper.showError("HL7 Import Fehler", "Die Dateien aus dem Transferverzeichnis konnten nicht importiert werden."); } } else if (files == 0) { SWTHelper.showInfo("Laborimport", "Es waren keine Dateien zum Import vorhanden"); } else { SWTHelper.showInfo("Laborimport", Integer.toString(files) + " Dateien wurden fehlerfrei verarbeitet."); } } } }
[22008] sort files of hl7 import directory for correct order on import
bundles/ch.elexis.laborimport.hl7.allg/src/ch/elexis/laborimport/hl7/universal/Importer.java
[22008] sort files of hl7 import directory for correct order on import
<ide><path>bundles/ch.elexis.laborimport.hl7.allg/src/ch/elexis/laborimport/hl7/universal/Importer.java
<ide>
<ide> import java.io.File;
<ide> import java.io.FilenameFilter;
<add>import java.util.Arrays;
<ide>
<ide> import org.eclipse.jface.action.Action;
<ide> import org.eclipse.jface.action.IAction;
<ide>       int err = 0;
<ide>       int files = 0;
<ide>       Result<?> r = null;
<del>      for (String fn : dir.list(new FilenameFilter() {
<add>      String[] fileNames = dir.list(new FilenameFilter() {
<ide>
<ide>         public boolean accept(File arg0, String arg1){
<ide>           if (arg1.toLowerCase().endsWith(".hl7")) {
<ide>           }
<ide>           return false;
<ide>         }
<del>      })) {
<add>      });
<add>      Arrays.sort(fileNames);
<add>      for (String fn : fileNames) {
<ide>         files++;
<ide>         File hl7file = new File(dir, fn);
<ide>         r = mfParser.importFromFile(hl7file,
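File.list() gives no ordering guarantee, which is why the commit above materialises the filtered names and sorts them before importing, making the HL7 import order deterministic (lexicographic by file name). A minimal stand-alone sketch of that listing-and-sorting step follows; the directory argument and class name are placeholders, not part of the plugin.

    import java.io.File;
    import java.io.FilenameFilter;
    import java.util.Arrays;

    public class SortedHl7Listing {
      public static void main(String[] args) {
        File dir = new File(args.length > 0 ? args[0] : "."); // transfer directory (placeholder)

        // Filter to *.hl7 exactly as the importer does, then sort for a stable order.
        String[] fileNames = dir.list(new FilenameFilter() {
          public boolean accept(File parent, String name) {
            return name.toLowerCase().endsWith(".hl7");
          }
        });
        if (fileNames == null) {
          System.err.println("not a readable directory: " + dir);
          return;
        }
        Arrays.sort(fileNames); // lexicographic, same effect as the commit's Arrays.sort(fileNames)
        for (String fn : fileNames) {
          System.out.println(fn);
        }
      }
    }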
Java
agpl-3.0
dbdefae44a298c670fb3303bf0ab6cd1298ebef8
0
KinshipSoftware/KinOathKinshipArchiver,KinshipSoftware/KinOathKinshipArchiver,PeterWithers/temp-to-delete1,PeterWithers/temp-to-delete1
package nl.mpi.kinnate.kindocument; import java.io.File; import java.io.IOException; import java.io.StringReader; import java.net.URI; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.bind.Unmarshaller; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import nl.mpi.arbil.data.ArbilComponentBuilder; import nl.mpi.arbil.userstorage.SessionStorage; import nl.mpi.arbil.util.ArbilBugCatcher; import nl.mpi.kinnate.gedcomimport.ImportException; import nl.mpi.kinnate.kindata.EntityData; import nl.mpi.kinnate.uniqueidentifiers.UniqueIdentifier; import org.w3c.dom.DOMException; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; /** * Document : EntityBuilder * Created on : May 30, 2011, 1:25:05 PM * Author : Peter Withers */ public class EntityDocument { File entityFile = null; Document metadataDom = null; Node kinnateNode = null; Element metadataNode = null; Node currentDomNode = null; public EntityData entityData = null; private ImportTranslator importTranslator; public static String defaultEntityType = "individual"; private SessionStorage sessionStorage; public EntityDocument(ImportTranslator importTranslator, SessionStorage sessionStorage) { this.importTranslator = importTranslator; this.sessionStorage = sessionStorage; assignIdentiferAndFile(); } public EntityDocument(String entityType, ImportTranslator importTranslator, SessionStorage sessionStorage) throws ImportException { this.importTranslator = importTranslator; this.sessionStorage = sessionStorage; assignIdentiferAndFile(); try { // construct the metadata file URI xsdUri = new CmdiTransformer(sessionStorage).getXsdUrlString(entityType); URI addedNodeUri = new ArbilComponentBuilder().createComponentFile(entityFile.toURI(), xsdUri, false); } catch (KinXsdException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } setDomNodesFromExistingFile(); } public EntityDocument(EntityDocument entityDocumentToCopy, ImportTranslator importTranslator, SessionStorage sessionStorage) throws ImportException { this.importTranslator = importTranslator; this.sessionStorage = sessionStorage; assignIdentiferAndFile(); try { // load the document that needs to be copied so that it can be saved into the new location metadataDom = ArbilComponentBuilder.getDocument(entityDocumentToCopy.entityFile.toURI()); ArbilComponentBuilder.savePrettyFormatting(metadataDom, entityFile); } catch (IOException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (ParserConfigurationException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (SAXException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } // replace the entity data in the new document setDomNodesFromExistingFile(); } public EntityDocument(File destinationDirectory, String nameString, String entityType, ImportTranslator importTranslator, SessionStorage sessionStorage) throws ImportException { this.importTranslator = importTranslator; this.sessionStorage = sessionStorage; String idString; entityData = new EntityData(new 
UniqueIdentifier(UniqueIdentifier.IdentifierType.lid)); if (nameString != null) { idString = nameString; entityFile = new File(destinationDirectory, nameString + ".kmdi"); } else { idString = entityData.getUniqueIdentifier().getQueryIdentifier() + ".kmdi"; File subDirectory = new File(destinationDirectory, idString.substring(0, 2)); subDirectory.mkdir(); entityFile = new File(subDirectory, idString); } try { // construct the metadata file URI xsdUri = new CmdiTransformer(sessionStorage).getXsdUrlString(entityType); URI addedNodeUri = new ArbilComponentBuilder().createComponentFile(entityFile.toURI(), xsdUri, false); } catch (KinXsdException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } setDomNodesFromExistingFile(); } public EntityDocument(URI entityUri, ImportTranslator importTranslator, SessionStorage sessionStorage) throws ImportException { this.importTranslator = importTranslator; this.sessionStorage = sessionStorage; entityFile = new File(entityUri); setDomNodesFromExistingFile(); } private void assignIdentiferAndFile() { String idString; entityData = new EntityData(new UniqueIdentifier(UniqueIdentifier.IdentifierType.lid)); idString = entityData.getUniqueIdentifier().getQueryIdentifier() + ".kmdi"; File subDirectory = new File(sessionStorage.getCacheDirectory(), idString.substring(0, 2)); subDirectory.mkdir(); entityFile = new File(subDirectory, idString); } private void setDomNodesFromExistingFile() throws ImportException { try { metadataDom = ArbilComponentBuilder.getDocument(entityFile.toURI()); kinnateNode = metadataDom.getDocumentElement(); // final NodeList metadataNodeList = ((Element) kinnateNode).getElementsByTagNameNS("http://mpi.nl/tla/kin", "Metadata"); final NodeList metadataNodeList = ((Element) kinnateNode).getElementsByTagName("Metadata"); if (metadataNodeList.getLength() < 1) { throw new ImportException("Metadata node not found"); } metadataNode = (Element) metadataNodeList.item(0); // remove any old entity data which will be replaced on save with the existingEntity data provided final NodeList entityNodeList = ((Element) kinnateNode).getElementsByTagNameNS("*", "Entity"); // todo: this name space could be specified when the schema is complete: "http://mpi.nl/tla/kin" instead of "*" for (int entityCounter = 0; entityCounter < entityNodeList.getLength(); entityCounter++) { if (entityData == null) { JAXBContext jaxbContext = JAXBContext.newInstance(EntityData.class); Unmarshaller unmarshaller = jaxbContext.createUnmarshaller(); entityData = (EntityData) unmarshaller.unmarshal(entityNodeList.item(entityCounter), EntityData.class).getValue(); } kinnateNode.removeChild(entityNodeList.item(entityCounter)); } currentDomNode = metadataNode; if (entityData == null) { throw new ImportException("Entity node not found"); } } catch (JAXBException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (ParserConfigurationException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (SAXException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (IOException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } } public String getFileName() { return entityFile.getName(); } public UniqueIdentifier getUniqueIdentifier() { 
return entityData.getUniqueIdentifier(); } public URI createBlankDocument(boolean overwriteExisting) throws ImportException { if (metadataDom != null) { throw new ImportException("The document already exists"); } URI entityUri; if (!overwriteExisting && entityFile.exists()) { throw new ImportException("Skipping existing entity file"); } else { // start skip overwrite try { entityUri = entityFile.toURI(); URI xsdUri = new CmdiTransformer(sessionStorage).getXsdUrlString("individual"); DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); documentBuilderFactory.setNamespaceAware(true); String templateXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<Kinnate \n" + "xmlns:kin=\"http://mpi.nl/tla/kin\" \n" + "xmlns:dcr=\"http://www.isocat.org/ns/dcr\" \n" + "xmlns:ann=\"http://www.clarin.eu\" \n" + "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" \n" + "xmlns:cmd=\"http://www.clarin.eu/cmd/\" \n" + "xmlns=\"http://www.clarin.eu/cmd/\" \n" + "KmdiVersion=\"1.1\" \n" + "xsi:schemaLocation=\"http://mpi.nl/tla/kin " + xsdUri.toString() + "\n \" />"; System.out.println("templateXml: " + templateXml); DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); metadataDom = documentBuilder.parse(new InputSource(new StringReader(templateXml))); metadataNode = metadataDom.createElementNS("http://www.clarin.eu/cmd/", "Metadata"); currentDomNode = metadataNode; kinnateNode = metadataDom.getDocumentElement(); } catch (DOMException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (ParserConfigurationException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (IOException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (SAXException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (KinXsdException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } return entityUri; } } public void insertValue(String nodeName, String valueString) { // this method will create a flat xml file and reuse any existing nodes of the target name ImportTranslator.TranslationElement translationElement = importTranslator.translate(nodeName, valueString); System.out.println("insertValue: " + translationElement.fieldName + " : " + translationElement.fieldValue); Node currentNode = metadataNode.getFirstChild(); while (currentNode != null) { if (translationElement.fieldName.equals(currentNode.getLocalName())) { if (currentNode.getTextContent() == null || currentNode.getTextContent().length() == 0) { // put the value into this node currentNode.setTextContent(translationElement.fieldValue); return; } if (currentNode.getTextContent().equals(translationElement.fieldValue)) { // if the value already exists then do not add again return; } } currentNode = currentNode.getNextSibling(); } Node valueElement = metadataDom.createElementNS("http://www.clarin.eu/cmd/", /*"cmd:" +*/ translationElement.fieldName); valueElement.setTextContent(translationElement.fieldValue); metadataNode.appendChild(valueElement); } private void importNode(Node foreignNode) { Node importedNode = metadataDom.importNode(foreignNode, true); while (importedNode.hasChildNodes()) { // the metadata node already exists so just add the child 
nodes of it Node currentChild = importedNode.getFirstChild(); currentDomNode.appendChild(currentChild); // importedNode.removeChild(currentChild); } } public Node insertNode(String nodeName, String valueString) { ImportTranslator.TranslationElement translationElement = importTranslator.translate(nodeName, valueString); System.out.println("nodeName: " + translationElement.fieldName + " : " + translationElement.fieldValue); Node valueElement = metadataDom.createElementNS("http://www.clarin.eu/cmd/", /*"cmd:" +*/ translationElement.fieldName); valueElement.setTextContent(translationElement.fieldValue); currentDomNode.appendChild(valueElement); return valueElement; } public void assendToLevel(int nodeLevel) { int levelCount = 0; Node counterNode = currentDomNode; while (counterNode != null) { levelCount++; counterNode = counterNode.getParentNode(); } levelCount = levelCount - 3; // always keep the kinnate.metadata nodes while (levelCount > nodeLevel) { levelCount--; currentDomNode = currentDomNode.getParentNode(); } } public void appendValueToLast(String valueString) { System.out.println("appendValueToLast: " + valueString); currentDomNode.setTextContent(currentDomNode.getTextContent() + valueString); } public void appendValue(String nodeName, String valueString, int targetLevel) { // this method will create a structured xml file // the nodeName will be translated if required in insertNode() System.out.println("appendValue: " + nodeName + " : " + valueString + " : " + targetLevel); assendToLevel(targetLevel); NodeList childNodes = currentDomNode.getChildNodes(); if (childNodes.getLength() == 1 && childNodes.item(0).getNodeType() == Node.TEXT_NODE) { // getTextContent returns the text value of all sub nodes so make sure there is only one node which would be the text node String currentValue = currentDomNode.getTextContent(); if (currentValue != null && currentValue.trim().length() > 0) { Node spacerElement = metadataDom.createElementNS("http://www.clarin.eu/cmd/", /*"cmd:" +*/ currentDomNode.getLocalName()); Node parentNode = currentDomNode.getParentNode(); parentNode.removeChild(currentDomNode); spacerElement.appendChild(currentDomNode); parentNode.appendChild(spacerElement); currentDomNode = spacerElement; // currentDomNode.setTextContent(""); //insertNode(currentDomNode.getLocalName(), currentValue); } } currentDomNode = insertNode(nodeName, valueString); } // public void insertDefaultMetadata() { // // todo: this could be done via Arbil code and the schema when that is ready // insertValue("Gender", "unspecified"); // insertValue("Name", "unspecified"); // } public File getFile() { return entityFile; } public String getFilePath() { return entityFile.getAbsolutePath(); } public void setAsDeletedDocument() throws ImportException { // todo: } public void saveDocument() throws ImportException { try { JAXBContext jaxbContext = JAXBContext.newInstance(EntityData.class); Marshaller marshaller = jaxbContext.createMarshaller(); marshaller.marshal(entityData, kinnateNode); } catch (JAXBException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } // try { // Node entityNode = org.apache.xpath.XPathAPI.selectSingleNode(metadataDom, "/:Kinnate/:Entity"); kinnateNode.appendChild(metadataNode); // todo: maybe insert the user selected CMDI profile into the XML declaration of the kinnate node and let arbil handle the adding of sub nodes or consider using ArbilComponentBuilder to insert a cmdi sub component into the metadata node or keep 
the cmdi data in a separate file // } catch (TransformerException exception) { // new ArbilBugCatcher().logError(exception); // throw new ImportException("Error: " + exception.getMessage()); // } ArbilComponentBuilder.savePrettyFormatting(metadataDom, entityFile); System.out.println("saved: " + entityFile.toURI().toString()); } // private EntityDocument(File destinationDirectory, String typeString, String idString, HashMap<String, ArrayList<String>> createdNodeIds, boolean overwriteExisting) { }
desktop/src/main/java/nl/mpi/kinnate/kindocument/EntityDocument.java
package nl.mpi.kinnate.kindocument; import java.io.File; import java.io.IOException; import java.io.StringReader; import java.net.URI; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBException; import javax.xml.bind.Marshaller; import javax.xml.bind.Unmarshaller; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import nl.mpi.arbil.data.ArbilComponentBuilder; import nl.mpi.arbil.userstorage.SessionStorage; import nl.mpi.arbil.util.ArbilBugCatcher; import nl.mpi.kinnate.gedcomimport.ImportException; import nl.mpi.kinnate.kindata.EntityData; import nl.mpi.kinnate.uniqueidentifiers.UniqueIdentifier; import org.w3c.dom.DOMException; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; /** * Document : EntityBuilder * Created on : May 30, 2011, 1:25:05 PM * Author : Peter Withers */ public class EntityDocument { File entityFile = null; Document metadataDom = null; Node kinnateNode = null; Element metadataNode = null; Node currentDomNode = null; public EntityData entityData = null; private ImportTranslator importTranslator; public static String defaultEntityType = "individual"; private SessionStorage sessionStorage; public EntityDocument(ImportTranslator importTranslator, SessionStorage sessionStorage) { this.importTranslator = importTranslator; this.sessionStorage = sessionStorage; assignIdentiferAndFile(); } public EntityDocument(String entityType, ImportTranslator importTranslator, SessionStorage sessionStorage) throws ImportException { this.importTranslator = importTranslator; this.sessionStorage = sessionStorage; assignIdentiferAndFile(); try { // construct the metadata file URI xsdUri = new CmdiTransformer(sessionStorage).getXsdUrlString(entityType); URI addedNodeUri = new ArbilComponentBuilder().createComponentFile(entityFile.toURI(), xsdUri, false); } catch (KinXsdException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } setDomNodesFromExistingFile(); } public EntityDocument(EntityDocument entityDocumentToCopy, ImportTranslator importTranslator, SessionStorage sessionStorage) throws ImportException { this.importTranslator = importTranslator; this.sessionStorage = sessionStorage; assignIdentiferAndFile(); try { // load the document that needs to be copied so that it can be saved into the new location metadataDom = ArbilComponentBuilder.getDocument(entityDocumentToCopy.entityFile.toURI()); ArbilComponentBuilder.savePrettyFormatting(metadataDom, entityFile); } catch (IOException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (ParserConfigurationException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (SAXException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } // replace the entity data in the new document setDomNodesFromExistingFile(); } public EntityDocument(File destinationDirectory, String nameString, String entityType, ImportTranslator importTranslator, SessionStorage sessionStorage) throws ImportException { this.importTranslator = importTranslator; this.sessionStorage = sessionStorage; String idString; entityData = new EntityData(new 
UniqueIdentifier(UniqueIdentifier.IdentifierType.lid)); if (nameString != null) { idString = nameString; entityFile = new File(destinationDirectory, nameString + ".kmdi"); } else { idString = entityData.getUniqueIdentifier().getQueryIdentifier() + ".kmdi"; File subDirectory = new File(destinationDirectory, idString.substring(0, 2)); subDirectory.mkdir(); entityFile = new File(subDirectory, idString); } try { // construct the metadata file URI xsdUri = new CmdiTransformer(sessionStorage).getXsdUrlString(entityType); URI addedNodeUri = new ArbilComponentBuilder().createComponentFile(entityFile.toURI(), xsdUri, false); } catch (KinXsdException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } setDomNodesFromExistingFile(); } public EntityDocument(URI entityUri, ImportTranslator importTranslator, SessionStorage sessionStorage) throws ImportException { this.importTranslator = importTranslator; this.sessionStorage = sessionStorage; entityFile = new File(entityUri); setDomNodesFromExistingFile(); } private void assignIdentiferAndFile() { String idString; entityData = new EntityData(new UniqueIdentifier(UniqueIdentifier.IdentifierType.lid)); idString = entityData.getUniqueIdentifier().getQueryIdentifier() + ".kmdi"; File subDirectory = new File(sessionStorage.getCacheDirectory(), idString.substring(0, 2)); subDirectory.mkdir(); entityFile = new File(subDirectory, idString); } private void setDomNodesFromExistingFile() throws ImportException { try { metadataDom = ArbilComponentBuilder.getDocument(entityFile.toURI()); kinnateNode = metadataDom.getDocumentElement(); // final NodeList metadataNodeList = ((Element) kinnateNode).getElementsByTagNameNS("http://mpi.nl/tla/kin", "Metadata"); final NodeList metadataNodeList = ((Element) kinnateNode).getElementsByTagName("Metadata"); if (metadataNodeList.getLength() < 1) { throw new ImportException("Metadata node not found"); } metadataNode = (Element) metadataNodeList.item(0); // remove any old entity data which will be replaced on save with the existingEntity data provided final NodeList entityNodeList = ((Element) kinnateNode).getElementsByTagNameNS("*", "Entity"); // todo: this name space could be specified when the schema is complete: "http://mpi.nl/tla/kin" instead of "*" for (int entityCounter = 0; entityCounter < entityNodeList.getLength(); entityCounter++) { if (entityData == null) { JAXBContext jaxbContext = JAXBContext.newInstance(EntityData.class); Unmarshaller unmarshaller = jaxbContext.createUnmarshaller(); entityData = (EntityData) unmarshaller.unmarshal(entityNodeList.item(entityCounter), EntityData.class).getValue(); } kinnateNode.removeChild(entityNodeList.item(entityCounter)); } currentDomNode = metadataNode; if (entityData == null) { throw new ImportException("Entity node not found"); } } catch (JAXBException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (ParserConfigurationException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (SAXException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (IOException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } } public String getFileName() { return entityFile.getName(); } public UniqueIdentifier getUniqueIdentifier() { 
return entityData.getUniqueIdentifier(); } public URI createBlankDocument(boolean overwriteExisting) throws ImportException { if (metadataDom != null) { throw new ImportException("The document already exists"); } URI entityUri; if (!overwriteExisting && entityFile.exists()) { throw new ImportException("Skipping existing entity file"); } else { // start skip overwrite try { entityUri = entityFile.toURI(); URI xsdUri = new CmdiTransformer(sessionStorage).getXsdUrlString("individual"); DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); documentBuilderFactory.setNamespaceAware(true); String templateXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<Kinnate \n" + "xmlns:kin=\"http://mpi.nl/tla/kin\" \n" + "xmlns:dcr=\"http://www.isocat.org/ns/dcr\" \n" + "xmlns:ann=\"http://www.clarin.eu\" \n" + "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" \n" + "xmlns:cmd=\"http://www.clarin.eu/cmd/\" \n" + "xmlns=\"http://www.clarin.eu/cmd/\" \n" + "KmdiVersion=\"1.1\" \n" + "xsi:schemaLocation=\"http://mpi.nl/tla/kin " + xsdUri.toString() + "\n \" />"; System.out.println("templateXml: " + templateXml); DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); metadataDom = documentBuilder.parse(new InputSource(new StringReader(templateXml))); metadataNode = metadataDom.createElementNS("http://www.clarin.eu/cmd/", "Metadata"); currentDomNode = metadataNode; kinnateNode = metadataDom.getDocumentElement(); } catch (DOMException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (ParserConfigurationException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (IOException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (SAXException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } catch (KinXsdException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } return entityUri; } } public void insertValue(String nodeName, String valueString) { // this method will create a flat xml file and reuse any existing nodes of the target name ImportTranslator.TranslationElement translationElement = importTranslator.translate(nodeName, valueString); System.out.println("insertValue: " + translationElement.fieldName + " : " + translationElement.fieldValue); Node currentNode = metadataNode.getFirstChild(); while (currentNode != null) { if (translationElement.fieldName.equals(currentNode.getLocalName())) { if (currentNode.getTextContent() == null || currentNode.getTextContent().length() == 0) { // put the value into this node currentNode.setTextContent(translationElement.fieldValue); return; } if (currentNode.getTextContent().equals(translationElement.fieldValue)) { // if the value already exists then do not add again return; } } currentNode = currentNode.getNextSibling(); } Node valueElement = metadataDom.createElementNS("http://www.clarin.eu/cmd/", /*"cmd:" +*/ translationElement.fieldName); // todo/ ulr encode / and other chars valueElement.setTextContent(translationElement.fieldValue); metadataNode.appendChild(valueElement); } private void importNode(Node foreignNode) { Node importedNode = metadataDom.importNode(foreignNode, true); while (importedNode.hasChildNodes()) { // the metadata node 
already exists so just add the child nodes of it Node currentChild = importedNode.getFirstChild(); currentDomNode.appendChild(currentChild); // importedNode.removeChild(currentChild); } } public Node insertNode(String nodeName, String valueString) { ImportTranslator.TranslationElement translationElement = importTranslator.translate(nodeName, valueString); System.out.println("nodeName: " + translationElement.fieldName + " : " + translationElement.fieldValue); Node valueElement = metadataDom.createElementNS("http://www.clarin.eu/cmd/", /*"cmd:" +*/ translationElement.fieldName); valueElement.setTextContent(translationElement.fieldValue); currentDomNode.appendChild(valueElement); return valueElement; } public void assendToLevel(int nodeLevel) { int levelCount = 0; Node counterNode = currentDomNode; while (counterNode != null) { levelCount++; counterNode = counterNode.getParentNode(); } levelCount = levelCount - 3; // always keep the kinnate.metadata nodes while (levelCount > nodeLevel) { levelCount--; currentDomNode = currentDomNode.getParentNode(); } } public void appendValueToLast(String valueString) { System.out.println("appendValueToLast: " + valueString); currentDomNode.setTextContent(currentDomNode.getTextContent() + valueString); } public void appendValue(String nodeName, String valueString, int targetLevel) { // this method will create a structured xml file // the nodeName will be translated if required in insertNode() System.out.println("appendValue: " + nodeName + " : " + valueString + " : " + targetLevel); assendToLevel(targetLevel); NodeList childNodes = currentDomNode.getChildNodes(); if (childNodes.getLength() == 1 && childNodes.item(0).getNodeType() == Node.TEXT_NODE) { // getTextContent returns the text value of all sub nodes so make sure there is only one node which would be the text node String currentValue = currentDomNode.getTextContent(); if (currentValue != null && currentValue.trim().length() > 0) { Node spacerElement = metadataDom.createElementNS("http://www.clarin.eu/cmd/", /*"cmd:" +*/ currentDomNode.getLocalName()); Node parentNode = currentDomNode.getParentNode(); parentNode.removeChild(currentDomNode); spacerElement.appendChild(currentDomNode); parentNode.appendChild(spacerElement); currentDomNode = spacerElement; // currentDomNode.setTextContent(""); //insertNode(currentDomNode.getLocalName(), currentValue); } } currentDomNode = insertNode(nodeName, valueString); } // public void insertDefaultMetadata() { // // todo: this could be done via Arbil code and the schema when that is ready // insertValue("Gender", "unspecified"); // insertValue("Name", "unspecified"); // } public File getFile() { return entityFile; } public String getFilePath() { return entityFile.getAbsolutePath(); } public void setAsDeletedDocument() throws ImportException { // todo: } public void saveDocument() throws ImportException { try { JAXBContext jaxbContext = JAXBContext.newInstance(EntityData.class); Marshaller marshaller = jaxbContext.createMarshaller(); marshaller.marshal(entityData, kinnateNode); } catch (JAXBException exception) { new ArbilBugCatcher().logError(exception); throw new ImportException("Error: " + exception.getMessage()); } // try { // Node entityNode = org.apache.xpath.XPathAPI.selectSingleNode(metadataDom, "/:Kinnate/:Entity"); kinnateNode.appendChild(metadataNode); // todo: maybe insert the user selected CMDI profile into the XML declaration of the kinnate node and let arbil handle the adding of sub nodes or consider using ArbilComponentBuilder to insert a cmdi sub 
component into the metadata node or keep the cmdi data in a separate file // } catch (TransformerException exception) { // new ArbilBugCatcher().logError(exception); // throw new ImportException("Error: " + exception.getMessage()); // } ArbilComponentBuilder.savePrettyFormatting(metadataDom, entityFile); System.out.println("saved: " + entityFile.toURI().toString()); } // private EntityDocument(File destinationDirectory, String typeString, String idString, HashMap<String, ArrayList<String>> createdNodeIds, boolean overwriteExisting) { }
Refactored to match recent changes to Arbil so that singletons are not static but are injected via the constructor. Added tests for the import translator and the import character escaping process. Updated the CSV import to support more formats.
desktop/src/main/java/nl/mpi/kinnate/kindocument/EntityDocument.java
Refactored to match recent changes to Arbil so that singletons are not static but are injected via the constructor. Added tests for the import translator and the import character escaping process. Updated the CSV import to support more formats.
<ide><path>esktop/src/main/java/nl/mpi/kinnate/kindocument/EntityDocument.java <ide> } <ide> currentNode = currentNode.getNextSibling(); <ide> } <del> Node valueElement = metadataDom.createElementNS("http://www.clarin.eu/cmd/", /*"cmd:" +*/ translationElement.fieldName); // todo/ ulr encode / and other chars <add> Node valueElement = metadataDom.createElementNS("http://www.clarin.eu/cmd/", /*"cmd:" +*/ translationElement.fieldName); <ide> valueElement.setTextContent(translationElement.fieldValue); <ide> metadataNode.appendChild(valueElement); <ide> }
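The commit message above notes that static singletons were replaced by collaborators injected via the constructor. A minimal, self-contained sketch of that pattern follows; the names (InjectionSketch, Storage, Consumer) are hypothetical illustrations and are not classes from this project.

// Constructor injection: the dependency is handed in when the object is built,
// so there is no static lookup and tests can supply a fake implementation.
public class InjectionSketch {

    /** Stands in for a service (e.g. a session/storage service) that used to be a static singleton. */
    interface Storage {
        java.io.File getCacheDirectory();
    }

    /** The consumer keeps only the injected reference; it never reaches for a global. */
    static class Consumer {
        private final Storage storage;

        Consumer(Storage storage) { // injected, not obtained statically
            this.storage = storage;
        }

        java.io.File cacheDir() {
            return storage.getCacheDirectory();
        }
    }

    public static void main(String[] args) {
        // Wire the dependency at the composition point; a test could pass a stub here instead.
        Storage storage = () -> new java.io.File(System.getProperty("java.io.tmpdir"));
        System.out.println(new Consumer(storage).cacheDir());
    }
}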
Java
apache-2.0
b1841cf901b615ceec7437a37c193f89d2a4e4f8
0
mcculls/bnd,psoreide/bnd,joansmith/bnd,lostiniceland/bnd,xtracoder/bnd,GEBIT/bnd,psoreide/bnd,joansmith/bnd,lostiniceland/bnd,magnet/bnd,GEBIT/bnd,magnet/bnd,mcculls/bnd,psoreide/bnd,mcculls/bnd,magnet/bnd,xtracoder/bnd,lostiniceland/bnd
package aQute.bnd.jpm; import static aQute.lib.io.IO.*; import java.awt.*; import java.awt.datatransfer.*; import java.io.*; import java.net.*; import java.util.*; import java.util.List; import java.util.concurrent.*; import java.util.regex.*; import aQute.bnd.build.*; import aQute.bnd.build.Container; import aQute.bnd.osgi.*; import aQute.bnd.osgi.Verifier; import aQute.bnd.osgi.resource.*; import aQute.bnd.service.*; import aQute.bnd.service.repository.*; import aQute.bnd.version.*; import aQute.jpm.facade.repo.*; import aQute.jsonrpc.proxy.*; import aQute.lib.collections.*; import aQute.lib.converter.*; import aQute.lib.hex.*; import aQute.lib.io.*; import aQute.lib.justif.*; import aQute.lib.settings.*; import aQute.libg.cryptography.*; import aQute.libg.glob.*; import aQute.libg.reporter.*; import aQute.rest.urlclient.*; import aQute.service.library.*; import aQute.service.library.Library.Program; import aQute.service.library.Library.Revision; import aQute.service.library.Library.RevisionRef; import aQute.service.reporter.*; /** * A bnd repository based on the jpm4j server. */ public class Repository implements Plugin, RepositoryPlugin, Closeable, Refreshable, Actionable, RegistryPlugin, SearchableRepository, InfoRepository { public static final String REPO_DEFAULT_URI = "http://repo.jpm4j.org/"; private static final PutOptions DEFAULT_OPTIONS = new PutOptions(); private final String DOWN_ARROW = " \u21E9"; protected final DownloadListener[] EMPTY_LISTENER = new DownloadListener[0]; private Pattern SHA = Pattern.compile( "([A-F0-9][a-fA-F0-9]){20,20}", Pattern.CASE_INSENSITIVE); private final Justif j = new Justif(80, new int[] { 20, 28, 36, 44 }); private Settings settings = new Settings(); private boolean canwrite; final MultiMap<File,DownloadListener> queues = new MultiMap<File,RepositoryPlugin.DownloadListener>(); private final Pattern JPM_REVISION_URL_PATTERN = Pattern .compile("https?://.+#!?/p/([^/]+)/([^/]+)/([^/]*)/([^/]+)"); private Options options; Reporter reporter = new ReporterAdapter(System.out); /** * Maintains the index of what we've downloaded so far. */ private File indexFile; private boolean indexRecurse; Index index; private boolean offline; private Registry registry; StoredRevisionCache cache; Set<File> notfound = new HashSet<File>(); private Set<String> notfoundref = new HashSet<String>(); final Semaphore limitDownloads = new Semaphore(12); private JpmRepo library; private String depositoryGroup; private String depositoryName; private URLClient urlc; private String location; private URLClient depository; private String email; private String name; URI url; /** * Reports downloads but does never block on them. This is a best effort, if * it fails, we can still get them later. */ class LocalDownloadListener implements DownloadListener { @Override public void success(File file) throws Exception { reporter.trace("downloaded %s", file); } @Override public void failure(File file, String reason) throws Exception { reporter.trace("failed to downloaded %s", file); } @Override public boolean progress(File file, int percentage) throws Exception { reporter.trace("Downloadedin %s %s%", file, percentage); return true; } } interface Options { /** * The URL to the remote repository. Default is http://repo.jpm4j.org * * @return */ URI url(); /** * The group of a depository,optional. 
* * @return */ String depository_group(); /** * The name of the depository * * @return */ String depository_name(); /** * The email address of the user * * @return */ String email(); /** * Where the index file is stored. The default should reside in the * workspace and be part of the scm * * @return */ String index(); /** * The cache location, default is ~/.bnd/cache. This file is relative * from the users home directory if not absolute. * * @return */ String location(); /** * Set the settings */ String settings(); /** * The name of the repo * * @return */ String name(); /** * Fetch dependencies automatically */ boolean recurse(); boolean trace(); } /** * Get a revision. */ @Override public File get(String bsn, Version version, Map<String,String> attrs, final DownloadListener... listeners) throws Exception { init(); // Check if we're supposed to have this RevisionRef resource = index.getRevisionRef(bsn, version); if (resource == null) return null; else return getLocal(resource, attrs, listeners); } /** * The index indicates we're allowed to have this one. So check if we have * it cached or if we need to download it. */ private File getLocal(RevisionRef resource, Map<String,String> attrs, DownloadListener... downloadListeners) throws Exception { File sources = cache.getPath(resource.bsn, Index.toVersion(resource).toString(), resource.revision, true); if (sources.isFile()) { for (DownloadListener dl : downloadListeners) { dl.success(sources); } return sources; } File file = cache.getPath(resource.bsn, Index.toVersion(resource).toString(), resource.revision); scheduleDownload(file, resource.revision, resource.size, resource.urls, downloadListeners); return file; } /** * Schedule a download, handling the listeners * * @param url */ private void scheduleDownload(final File file, final byte[] sha, final long size, final Set<URI> urls, DownloadListener... listeners) throws Exception { synchronized (notfound) { if (notfound.contains(file)) { failure(listeners, file, "Not found"); return; } } if (file.isFile()) { if (file.length() == size) { // Already exists, done success(listeners, file); reporter.trace("was in cache"); return; } reporter.error("found file but of different length %s, will refetch", file); } else { reporter.trace("not in cache %s", file + " " + queues); } // Check if we need synchronous if (listeners.length == 0) { reporter.trace("in cache, no listeners"); cache.download(file, urls, sha); return; } // // With download listeners we need to be careful to queue them // appropriately. Don't want to download n times because // requests arrive during downloads. // synchronized (queues) { List<DownloadListener> list = queues.get(file); boolean first = list == null || list.isEmpty(); for (DownloadListener l : listeners) { queues.add(file, l); } if (!first) { // return, file is being downloaded by another and that // other will signal the download listener. 
reporter.trace("someone else is downloading our file " + queues.get(file)); return; } } try { reporter.trace("starting thread for " + file); // Limit the total downloads going on at the same time limitDownloads.acquire(); Thread t = new Thread("Downloading " + file) { public void run() { try { reporter.trace("downloading in background " + file); cache.download(file, urls, sha); success(queues.get(file).toArray(EMPTY_LISTENER), file); } catch (FileNotFoundException e) { synchronized (notfound) { reporter.error("Not found %s", e, file); notfound.add(file); } synchronized (queues) { failure(queues.get(file).toArray(EMPTY_LISTENER), file, e.toString()); } } catch (Throwable e) { e.printStackTrace(); reporter.error("failed to download %s: %s", e, file); synchronized (queues) { failure(queues.get(file).toArray(EMPTY_LISTENER), file, e.toString()); } } finally { synchronized (queues) { queues.remove(file); } reporter.trace("downloaded " + file); // Allow other downloads to start limitDownloads.release(); } } }; t.start(); } catch (Exception e) { // Is very unlikely to happen but we must ensure the // listeners are called and we're at the head of the queue reporter.error("Starting a download for %s failed %s", file, e); synchronized (queues) { failure(queues.get(file).toArray(EMPTY_LISTENER), file, e.toString()); queues.remove(file); } } } /** * API method */ @Override public boolean canWrite() { return canwrite; } /** * Put an artifact in the repo */ @Override public PutResult put(InputStream in, PutOptions options) throws Exception { if (!canwrite) throw new UnsupportedOperationException( "This is not a writeable repo, s" + "et depository.group, depository.name and properties and ensure the email property is in your global settings"); assert in != null; assert depositoryGroup != null; assert depositoryName != null; init(); if (options == null) options = DEFAULT_OPTIONS; reporter.trace("syncing"); sync(); File file = File.createTempFile("put", ".jar"); file.deleteOnExit(); try { reporter.trace("creating tmp copy"); copy(in, file); if (depository == null) { URI url = library.depository(depositoryGroup, depositoryName); reporter.trace("send to url " + url); depository = new URLClient(url.toString()); setCredentials(depository); reporter.trace("credentials " + depository); } byte[] digest = options.digest == null ? SHA1.digest(file).digest() : options.digest; String path = Hex.toHexString(digest); reporter.trace("putting " + path); Library.RevisionRef d = depository.put(path, file, Library.RevisionRef.class, null); if (d == null) { reporter.error("Cant deposit %s", file); return null; } if (!Arrays.equals(digest, d.revision)) throw new Exception("Invalid digest"); // Copy it to our cache cache.add(d, file); index.addRevision(d); index.save(); // Coordinator PutResult putr = new PutResult(); putr.artifact = depository.getUri(path); putr.digest = digest; return putr; } catch (Exception e) { e.printStackTrace(); throw e; } finally { file.delete(); } } /** * If we have no search or an empty search we list our index. Otherwise we * query remotely. 
*/ Pattern COMMAND_P = Pattern.compile("^([^/]*)/(!?[lmsprw])([^/]*)$"); @Override public List<String> list(String query) throws Exception { init(); Set<String> bsns = new HashSet<String>(); if (query == null || query.trim().isEmpty()) query = "*"; else query = query.trim(); Library.Phase phase = null; boolean negated = false; Matcher m = COMMAND_P.matcher(query); if (m.matches()) { query = m.group(1) + m.group(3); String cmd = m.group(2); if (cmd.startsWith("!")) { negated = true; cmd = cmd.substring(1); } char c = Character.toLowerCase(cmd.charAt(0)); switch (c) { case 'l' : phase = Library.Phase.LOCKED; break; case 'p' : phase = Library.Phase.PENDING; break; case 's' : phase = Library.Phase.STAGING; break; case 'm' : phase = Library.Phase.MASTER; break; case 'r' : phase = Library.Phase.RETIRED; break; case 'w' : phase = Library.Phase.WITHDRAWN; break; } reporter.trace("Phase is " + c + " " + phase); } Glob glob = null; try { glob = new Glob(query); } catch (Exception e) { glob = new Glob("*"); } bsn: for (String bsn : index.getBsns()) { if (glob.matcher(bsn).matches()) { if (phase != null) { boolean hasPhase = false; revision: for (Version version : index.getVersions(bsn)) { RevisionRef ref = index.getRevisionRef(bsn, version); if (ref.phase == phase) { hasPhase = true; break revision; } } if (hasPhase == negated) continue bsn; } bsns.add(bsn); } } List<String> result = new ArrayList<String>(bsns); Collections.sort(result); return result; } /** * List the versions belonging to a bsn */ @Override public SortedSet<Version> versions(String bsn) throws Exception { init(); SortedSet<Version> versions = index.getVersions(bsn); if (!versions.isEmpty() || !index.isLearning()) { return versions; } return versions; } /* * Convert a baseline/qualifier to a version */ static Version toVersion(String baseline, String qualifier) { if (qualifier == null || qualifier.isEmpty()) return new Version(baseline); else return new Version(baseline + "." + qualifier); } /* * Return if bsn is a SHA */ private boolean isSha(String bsn) { return SHA.matcher(bsn).matches(); } @Override public String getName() { return name == null ? 
"jpm4j" : name; } @Override public void setProperties(Map<String,String> map) { reporter.trace("CLs " + getClass().getClassLoader() + " " + URLClient.class.getClassLoader()); try { options = Converter.cnv(Options.class, map); setOptions(options); } catch (Exception e) { throw new RuntimeException(e); } } public void setOptions(Options options) { try { location = options.location(); if (location == null) location = "~/.bnd/shacache"; this.name = options.name(); if (options.settings() != null) { settings = new Settings(options.settings()); } email = options.email(); if (email == null) email = settings.getEmail(); url = options.url(); if (url == null) url = new URI(REPO_DEFAULT_URI); urlc = new URLClient(url.toString()); if (email != null && !email.contains("anonymous")) setCredentials(urlc); urlc.setReporter(reporter); File cacheDir = IO.getFile(IO.home, location); cacheDir.mkdirs(); if (!cacheDir.isDirectory()) throw new IllegalArgumentException("Not able to create cache directory " + cacheDir); String indexPath = options.index(); if (indexPath == null) throw new IllegalArgumentException("Index file not set (index) "); indexFile = IO.getFile(indexPath); if (indexFile.isDirectory()) throw new IllegalArgumentException("Index file is a directory instead of a file " + indexFile.getAbsolutePath()); indexRecurse = options.recurse(); cache = new StoredRevisionCache(cacheDir, settings); library = JSONRPCProxy.createRPC(JpmRepo.class, urlc, "jpm"); if (options.index() == null) throw new IllegalArgumentException("Index file not set"); canwrite = false; if (options.depository_group() != null) { depositoryGroup = options.depository_group(); depositoryName = options.depository_name(); if (depositoryName == null) depositoryName = "home"; canwrite = email != null; } } catch (Exception e) { if (reporter != null) reporter.exception(e, "Creating options"); throw new RuntimeException(e); } } private void setCredentials(URLClient urlc) throws UnknownHostException, Exception { urlc.credentials(email, InetAddress.getLocalHost().getHostName(), settings.getPublicKey(), settings.getPrivateKey()); } @Override public void setReporter(Reporter processor) { reporter = processor; if (index != null) index.setReporter(reporter); if (urlc != null) urlc.setReporter(processor); } @Override public boolean refresh() throws Exception { index = new Index(indexFile); index.setRecurse(indexRecurse); cache.refresh(); notfound.clear(); notfoundref.clear(); return true; } /** * Return the actions for this repository */ @Override public Map<String,Runnable> actions(Object... target) throws Exception { init(); boolean connected = isConnected(); if (target == null) return null; if (target.length == 0) return getRepositoryActions(); final String bsn = (String) target[0]; Program careful = null; if (connected) try { careful = getProgram(bsn, true); } catch (Exception e) { reporter.error("Offline? 
%s", e); } final Program p = careful; if (target.length == 1) return getProgramActions(bsn, p); if (target.length >= 2) { final Version version = (Version) target[1]; return getRevisionActions(p, bsn, version); } return null; } /** * @param p * @param bsn * @param version * @return * @throws Exception */ static Pattern JAR_FILE_P = Pattern.compile("(https?:.+)(\\.jar)"); private Map<String,Runnable> getRevisionActions(final Program program, final String bsn, final Version version) throws Exception { final Library.RevisionRef resource = index.getRevisionRef(bsn, version); Map<String,Runnable> map = new LinkedHashMap<String,Runnable>(); map.put("Inspect Revision", new Runnable() { public void run() { open(url + "#!/p/sha/" + Hex.toHexString(resource.revision) + "//0.0.0"); } }); map.put("Copy reference", new Runnable() { @Override public void run() { toClipboard(bsn, version); } }); Runnable doUpdate = getUpdateAction(program, resource); if (doUpdate != null) { map.put("Update to " + doUpdate, doUpdate); } else { map.put("-Update", null); } map.put("Delete", new Runnable() { public void run() { try { delete(bsn, version, true); } catch (Exception e) { e.printStackTrace(); } } }); if (isConnected()) { final File sourceFile = cache.getPath(bsn, version.toString(), resource.revision, true); Runnable run = null; if (!sourceFile.isFile()) { URL sourceURI = null; for (URI uri : resource.urls) { try { Matcher m = JAR_FILE_P.matcher(uri.toString()); if (m.matches()) { String stem = m.group(1); URL src = new URL(stem + "-sources.jar"); HttpURLConnection conn = (HttpURLConnection) src.openConnection(); conn.setRequestMethod("HEAD"); if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) { sourceURI = src; continue; } } } catch (Exception e) { // ignore } } if (sourceURI != null) { run = createAddSourceAction(bsn, version, resource, sourceFile, sourceURI); } } else reporter.trace("sources in %s", sourceFile); if (run != null) map.put("Add Sources", run); else map.put("-Add Sources", null); } if (cache.hasSources(bsn, version.toString(), resource.revision)) { map.put("Remove Sources", new Runnable() { @Override public void run() { cache.removeSources(bsn, version.toString(), resource.revision); } }); } return map; } /** * @param bsn * @param version * @param resource * @param withSources * @param src * @return */ protected Runnable createAddSourceAction(final String bsn, final Version version, final Library.RevisionRef resource, final File withSources, final URL src) { Runnable run; run = new Runnable() { public void run() { try { // Sync downloads so that we do not assume the // binary is already there ... so call without // listeners. get(bsn, version, null); File file = cache.getPath(bsn, version.toString(), resource.revision); Jar binary = new Jar(file); try { Jar sources = new Jar(src.getFile(), src.openStream()); binary.setDoNotTouchManifest(); try { binary.addAll(sources, null, "OSGI-OPT/src"); binary.write(withSources); } finally { sources.close(); } } finally { binary.close(); } } catch (Exception e) { throw new RuntimeException(e); } } }; return run; } /** * @param bsn * @param version * @param resource * @param withSources * @param src * @return */ protected Runnable createRemoveSourceAction(final String bsn, final Version version, final Library.RevisionRef resource, final File withSources, final URL src) { Runnable run; run = new Runnable() { public void run() { try { // Sync downloads so that we do not assume the // binary is already there ... so call without // listeners. 
get(bsn, version, null); File file = cache.getPath(bsn, version.toString(), resource.revision); Jar binary = new Jar(file); try { Jar sources = new Jar(src.getFile(), src.openStream()); try { binary.addAll(sources, null, "OSGI-OPT/src"); binary.write(withSources); } finally { sources.close(); } } finally { binary.close(); } } catch (Exception e) { throw new RuntimeException(e); } } }; return run; } /** * @param bsn * @param p * @return * @throws Exception */ private Map<String,Runnable> getProgramActions(final String bsn, final Program p) throws Exception { Map<String,Runnable> map = new LinkedHashMap<String,Runnable>(); if (p != null) { map.put("Inspect Program", new Runnable() { public void run() { open(url + "#!/p/osgi/" + bsn); } }); final SortedSet<Version> versions = index.getVersions(bsn); if (versions.isEmpty()) map.put("-Copy reference", null); else map.put("Copy reference", new Runnable() { @Override public void run() { toClipboard(bsn, versions.first()); } }); RevisionRef ref = p.revisions.get(0); Version latest = toVersion(ref.baseline, ref.qualifier); for (Version v : index.getVersions(bsn)) { if (v.equals(latest)) { latest = null; break; } } final Version l = latest; String title = "Get Latest"; if (latest == null) title = "-" + title; else title += " " + l + ref.phase; map.put(title, new Runnable() { public void run() { try { add(bsn, l); } catch (Exception e) { throw new RuntimeException(e); } } }); Runnable updateAction = getUpdateAction(p, bsn); if (updateAction != null) map.put("Update " + updateAction, updateAction); else map.put("-Update", null); } else { map.put("-Update (offline)", null); } map.put("Delete", new Runnable() { public void run() { try { delete(bsn); } catch (Exception e) { throw new RuntimeException(e); } } }); return map; } /** * @return * @throws Exception */ private Map<String,Runnable> getRepositoryActions() throws Exception { Map<String,Runnable> map = new LinkedHashMap<String,Runnable>(); map.put("Inspect", new Runnable() { public void run() { try { byte[] revisions = sync(); open(url + "#!/revisions/" + Hex.toHexString(revisions)); } catch (Exception e) { throw new RuntimeException(e); } } }); map.put("Delete Cache", new Runnable() { @Override public void run() { try { cache.deleteAll(); } catch (Exception e) { reporter.error("Deleting cache %s", e); } } }); map.put("Refresh", new Runnable() { @Override public void run() { try { refresh(); } catch (Exception e) { reporter.error("Refreshing %s", e); } } }); map.put("Update All", new Runnable() { @Override public void run() { try { updateAll(); } catch (Exception e) { reporter.error("Update all %s", e); } } }); map.put("Download All", new Runnable() { @Override public void run() { try { DownloadListener dl = new DownloadListener() { @Override public void success(File file) throws Exception { reporter.trace("downloaded %s", file); } @Override public void failure(File file, String reason) throws Exception { reporter.trace("failed to download %s becasue %s", file, reason); } @Override public boolean progress(File file, int percentage) throws Exception { reporter.progress(((float) percentage) / 100, "downloading %s", file); return true; } }; for (String bsn : list(null)) { for (Version v : versions(bsn)) { get(bsn, v, null, dl); } } } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); map.put("Remove unused/Add missing", new Runnable() { @Override public void run() { try { cleanUp(); } catch (Exception e) { e.printStackTrace(); } } }); String title = "Learning{Unknown 
resources are an error, select to learn}"; if (index.isLearning()) { title = "!Learning{Will attempt to fetch unknown resources, select to make this an error}"; } map.put(title, new Runnable() { @Override public void run() { try { index.setLearning(!index.isLearning()); index.save(); } catch (Exception e) { reporter.error("Learning %s", e); } } }); title = "Recurse{Do not fetch dependencies automatically}"; if (index.isRecurse()) { title = "!Recurse{Fetch dependencies automatically}"; } map.put(title, new Runnable() { @Override public void run() { try { index.setRecurse(!index.isRecurse()); index.save(); } catch (Exception e) { reporter.error("Learning %s", e); } } }); return map; } @Override public String tooltip(Object... target) throws Exception { init(); if (target == null || target.length == 0) return repositoryTooltip(); if (target.length == 1) return programTooltip((String) target[0]); if (target.length == 2) return revisionTooltip((String) target[0], (Version) target[1]); return "Hmm, have no idea on what object you want a tooltip ..."; } private String repositoryTooltip() throws Exception { Formatter f = new Formatter(); try { f.format("%s\n", this); if (depositoryGroup != null && depositoryName != null) { f.format("\n[Depository]\n"); f.format("Group: %s\n", depositoryGroup); f.format("Depository: %s\n", depositoryName); f.format("Email: %s\n", email); f.format("Writable: %s %s\n", canwrite, (email == null ? "(no email set, see 'bnd settings email=...')" : "")); f.format("Public key: %s…\n", Hex.toHexString(settings.getPublicKey()).substring(0, 16)); } f.format("\n[Files]\nCache location %s\n", options.location()); f.format("Index file %s\n", options.index()); f.format("Number of bsns %s\n", index.getBsns().size()); f.format("Number of revs %s\n", index.getRevisionRefs().size()); f.format("Dirty %s\n", index.isDirty()); return f.toString().trim(); } finally { f.close(); } } private String programTooltip(String bsn) throws Exception { Program p = getProgram(bsn, false); if (p != null) { Formatter sb = new Formatter(); try { if (p.wiki != null && p.wiki.text != null) sb.format("%s\n", p.wiki.text.replaceAll("#\\s?", "")); else if (p.last.description != null) sb.format("%s\n", p.last.description); else sb.format("No description\n"); j.wrap((StringBuilder) sb.out()); return sb.toString().trim(); } finally { sb.close(); } } return null; } private String revisionTooltip(String bsn, Version version) throws Exception { RevisionRef r = getRevisionRef(bsn, version); if (r == null) return null; Formatter sb = new Formatter(); try { sb.format("[%s:%s", r.groupId, r.artifactId); if (r.classifier != null) { sb.format(":%s", r.classifier); } sb.format("@%s] %s\n\n", r.version, r.phase); if (r.releaseSummary != null) sb.format("%s\n\n", r.releaseSummary); if (r.description != null) sb.format("%s\n\n", r.description.replaceAll("#\\s*", "")); sb.format("Size: %s\n", size(r.size, 0)); sb.format("SHA-1: %s\n", Hex.toHexString(r.revision)); sb.format("Age: %s\n", age(r.created)); sb.format("URL: %s\n", r.urls); File f = cache.getPath(bsn, version.toString(), r.revision); if (f.isFile() && f.length() == r.size) sb.format("Cached %s\n", f); else sb.format("Not downloaded\n"); Program p = getProgram(bsn, false); if (p != null) { Runnable update = getUpdateAction(p, r); if (update != null) { sb.format("%c This version can be updated to %s\n", DOWN_ARROW, update); } } File sources = cache.getPath(bsn, version.toString(), r.revision, true); if (sources.isFile()) sb.format("Has sources: %s\n", 
sources.getAbsolutePath()); else sb.format("No sources\n"); j.wrap((StringBuilder) sb.out()); return sb.toString().trim(); } finally { sb.close(); } } private List<RevisionRef> getRevisionRefs(String bsn) throws Exception { String classifier = null; String parts[] = bsn.split("__"); if (parts.length == 3) { bsn = parts[0] + "__" + parts[1]; classifier = parts[2]; } Program program = getProgram(bsn, false); if (program != null) { List<RevisionRef> refs = new ArrayList<Library.RevisionRef>(); for (RevisionRef r : program.revisions) { if (eq(classifier, r.classifier)) refs.add(r); } return refs; } return Collections.emptyList(); } /** * Find a revisionref for a bsn/version * * @param bsn * @param version * @return * @throws Exception */ private RevisionRef getRevisionRef(String bsn, Version version) throws Exception { // Handle when we have a sha reference String id = bsn + "-" + version; if (notfoundref.contains(id)) return null; if (isSha(bsn) && version.equals(Version.LOWEST)) { Revision r = getRevision(new Coordinate(bsn)); if (r == null) return null; return new RevisionRef(r); } reporter.trace("Looking for %s-%s", bsn, version); for (RevisionRef r : getRevisionRefs(bsn)) { Version v = toVersion(r.baseline, r.qualifier); if (v.equals(version)) return r; } notfoundref.add(id); return null; } private boolean eq(String a, String b) { if (a == null) a = ""; if (b == null) b = ""; return a.equals(b); } private String age(long created) { if (created == 0) return "unknown"; long diff = (System.currentTimeMillis() - created) / (1000 * 60 * 60); if (diff < 48) return diff + " hours"; diff /= 24; if (diff < 14) return diff + " days"; diff /= 7; if (diff < 8) return diff + " weeks"; diff /= 4; if (diff < 24) return diff + " months"; diff /= 12; return diff + " years"; } String[] sizes = { "bytes", "Kb", "Mb", "Gb", "Tb", "Pb", "Showing off?" }; private String size(long size, int power) { if (power >= sizes.length) return size + " Pb"; if (size < 1000) return size + sizes[power]; return size(size / 1000, power + 1); } /** * Update all bsns * * @throws Exception */ void updateAll() throws Exception { for (String bsn : index.getBsns()) { update(bsn); } } /** * Update all baselines for a bsn * * @param bsn * @throws Exception */ void update(String bsn) throws Exception { Program program = getProgram(bsn, false); Runnable updateAction = getUpdateAction(program, bsn); if (updateAction == null) return; reporter.trace("update bsn %s", updateAction); updateAction.run(); } /** * Update a bsn * * @throws Exception */ Runnable getUpdateAction(Program program, String bsn) throws Exception { final Set<Runnable> update = new TreeSet<Runnable>(); for (Version v : index.getVersions(bsn)) { RevisionRef resource = index.getRevisionRef(bsn, v); Runnable updateAction = getUpdateAction(program, resource); if (updateAction != null) update.add(updateAction); } if (update.isEmpty()) return null; return new Runnable() { @Override public void run() { for (Runnable r : update) { r.run(); } } @Override public String toString() { return update.toString(); } }; } /** * Find a RevisionRef from the Program. We are looking for a version with * the same baseline but a higher qualifier or different phase. 
* * @param p * @param currentVersion * @return * @throws Exception */ private Runnable getUpdateAction(Program program, final RevisionRef current) throws Exception { RevisionRef candidateRef = null; Version candidate = toVersion(current.baseline, current.qualifier); for (RevisionRef r : program.revisions) { Version refVersion = toVersion(r.baseline, r.qualifier); if (eq(r.classifier, current.classifier)) { if (refVersion.compareTo(candidate) >= 0) { candidate = refVersion; candidateRef = r; } } } if (candidateRef == null) // // We're not present anymore, should never happen ... // return new Runnable() { @Override public void run() { try { index.delete(current.bsn, toVersion(current.baseline, current.qualifier)); } catch (Exception e) { throw new RuntimeException(e); } } public String toString() { return "[delete]"; } }; // // Check if we are not same revision // if (!candidateRef.version.equals(current.version)) { final RevisionRef toAdd = candidateRef; return new Runnable() { // // Replace the current version // public void run() { try { index.delete(current.bsn, toVersion(current.baseline, current.qualifier)); index.addRevision(toAdd); index.save(); } catch (Exception e) { throw new RuntimeException(e); } } public String toString() { return toAdd.version; } }; } // // So now we are the same, check if the phase has changed // if (candidateRef.phase != current.phase) { final RevisionRef toChange = candidateRef; return new Runnable() { @Override public void run() { try { index.delete(current.bsn, toVersion(current.baseline, current.qualifier)); index.addRevision(toChange); index.save(); } catch (Exception e) { throw new RuntimeException(e); } } public String toString() { return "-> " + toChange.phase; } }; } return null; } public void setIndex(File index) { indexFile = index; } void success(DownloadListener[] downloadListeners, File f) { for (DownloadListener l : downloadListeners) { try { l.success(f); } catch (Exception e) { e.printStackTrace(); } } } void failure(DownloadListener[] listeners, File f, String reason) { for (DownloadListener l : listeners) { try { l.failure(f, reason); } catch (Exception e) { e.printStackTrace(); } } } @Override public String title(Object... 
target) throws Exception { init(); if (target == null || target.length == 0) return getName(); if (target.length == 1 && target[0] instanceof String) { String bsn = (String) target[0]; String title = bsn; return title; } if (target.length == 2 && target[0] instanceof String && target[1] instanceof Version) { String bsn = (String) target[0]; Version version = (Version) target[1]; Library.RevisionRef resource = index.getRevisionRef(bsn, version); if (resource == null) return "[deleted " + version + "]"; String title = getPhase(resource.phase.toString()) + " " + version.toString(); File path = cache.getPath(bsn, version.toString(), resource.revision); if (path.isFile() && path.length() == resource.size) { title += DOWN_ARROW; } if (cache.getPath(bsn, version.toString(), resource.revision, true).isFile()) title += "+"; return title; } return null; } // Temp until we fixed bnd in bndtools enum Phase { STAGING(false, false, false, "[s]"), LOCKED(true, false, false, "[l]"), MASTER(true, true, true, "[m]"), RETIRED( true, false, true, "[r]"), WITHDRAWN(true, false, true, "[x]"), UNKNOWN(true, false, false, "[?]"); boolean locked; boolean listable; boolean permanent; final String symbol; private Phase(boolean locked, boolean listable, boolean permanent, String symbol) { this.locked = locked; this.listable = listable; this.permanent = permanent; this.symbol = symbol; } public boolean isLocked() { return locked; } public boolean isListable() { return listable; } public boolean isPermanent() { return permanent; } public String getSymbol() { return symbol; } } private String getPhase(String phase) { try { return Phase.valueOf(phase).getSymbol(); } catch (Exception e) { return "?"; } } @Override public File getRoot() { return cache.getRoot(); } @Override public void close() throws IOException {} @Override public String getLocation() { return options.location(); } protected void fireBundleAdded(File file) throws IOException { if (registry == null) return; List<RepositoryListenerPlugin> listeners = registry.getPlugins(RepositoryListenerPlugin.class); if (listeners.isEmpty()) return; Jar jar = new Jar(file); try { for (RepositoryListenerPlugin listener : listeners) { try { listener.bundleAdded(this, jar, file); } catch (Exception e) { reporter.error("Repository listener threw an unexpected exception: %s", e, e); } finally {} } } finally { jar.close(); } } @Override public void setRegistry(Registry registry) { this.registry = registry; } private void init() throws Exception { if (index == null) { reporter.trace("init " + indexFile); index = new Index(indexFile); index.setRecurse(indexRecurse); index.setReporter(reporter); } } public void add(String bsn, Version version) throws Exception { reporter.trace("Add %s %s", bsn, version); RevisionRef ref = getRevisionRef(bsn, version); add(ref); } void add(RevisionRef ref) throws Exception { // Cleanup existing versions // We remove everything between [mask(v), v) Version newVersion = toVersion(ref.baseline, ref.qualifier); reporter.trace("New version " + ref.bsn + " " + newVersion); Version newMask = mask(newVersion); List<Version> toBeDeleted = new ArrayList<Version>(); for (Version existingVersion : index.getVersions(ref.bsn)) { Version existingMask = mask(existingVersion); if (newMask.equals(existingMask)) { reporter.trace("delete %s-%s", ref.bsn, existingVersion); toBeDeleted.add(existingVersion); } } for (Version v : toBeDeleted) index.delete(ref.bsn, v); reporter.trace("add %s-%s", ref.bsn, newVersion); index.addRevision(ref); getLocal(ref, null, new 
LocalDownloadListener()); if (index.isRecurse()) { Iterable<RevisionRef> refs = getClosure(ref); for (RevisionRef r : refs) { index.addRevision(r); getLocal(ref, null, new LocalDownloadListener()); } } index.save(indexFile); } /** * @param ref * @return * @throws Exception */ private Iterable<RevisionRef> getClosure(RevisionRef ref) throws Exception { return library.getClosure(ref.revision, false); } public void delete(String bsn, Version version, boolean immediate) throws Exception { reporter.trace("Delete %s %s", bsn, version); Library.RevisionRef resource = index.getRevisionRef(bsn, version); if (resource != null) { boolean removed = index.delete(bsn, version); reporter.trace("Was present " + removed); index.save(); } else reporter.trace("No such resource"); } public void delete(String bsn) throws Exception { reporter.trace("Delete %s", bsn); Set<Version> set = new HashSet<Version>(index.getVersions(bsn)); reporter.trace("Versions %s", set); for (Version version : set) { delete(bsn, version, true); } } public boolean dropTarget(URI uri) throws Exception { try { init(); reporter.trace("dropTarget " + uri); Matcher m = JPM_REVISION_URL_PATTERN.matcher(uri.toString()); if (!m.matches()) { reporter.trace("not a proper url to drop " + uri); return false; } Revision revision = getRevision(new Coordinate(m.group(1), m.group(2), m.group(3), m.group(4))); if (revision == null) { reporter.error("no revision found for %s", uri); return false; } Library.RevisionRef resource = index.getRevisionRef(revision._id); if (resource != null) { reporter.trace("resource already loaded " + uri); return true; } RevisionRef ref = new RevisionRef(revision); reporter.trace("adding revision " + ref); add(ref); return true; } catch (Exception e) { e.printStackTrace(); throw e; } } /* * A utility to open a URL on different OS's browsers * @param url the url to open * @throws IOException */ void open(String url) { try { try { Desktop desktop = Desktop.getDesktop(); desktop.browse(new URI(url)); return; } catch (Throwable e) { } String os = System.getProperty("os.name").toLowerCase(); Runtime rt = Runtime.getRuntime(); if (os.indexOf("mac") >= 0 || os.indexOf("darwin") >= 0) { rt.exec("open " + url); } else if (os.indexOf("win") >= 0) { // this doesn't support showing urls in the form of // "page.html#nameLink" rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.indexOf("nix") >= 0 || os.indexOf("nux") >= 0) { // Do a best guess on unix until we get a platform independent // way // Build a list of browsers to try, in this order. String[] browsers = { "epiphany", "firefox", "mozilla", "konqueror", "netscape", "opera", "links", "lynx" }; // Build a command string which looks like // "browser1 "url" || browser2 "url" ||..." StringBuffer cmd = new StringBuffer(); for (int i = 0; i < browsers.length; i++) cmd.append((i == 0 ? 
"" : " || ") + browsers[i] + " \"" + url + "\" "); rt.exec(new String[] { "sh", "-c", cmd.toString() }); } else reporter.trace("Open " + url); } catch (Exception e) { throw new RuntimeException(e); } } /** * Answer the resource descriptors from a URL */ // @Override public Set<ResourceDescriptor> getResources(URI url, boolean includeDependencies) throws Exception { try { Matcher m = JPM_REVISION_URL_PATTERN.matcher(url.toString()); if (!m.matches()) { return null; } Set<ResourceDescriptor> resources = new HashSet<ResourceDescriptor>(); Revision revision = getRevision(new Coordinate(m.group(1), m.group(2), m.group(3), m.group(4))); if (revision != null) { ResourceDescriptor rd = createResourceDescriptor(new RevisionRef(revision)); resources.add(rd); if (includeDependencies) { for (RevisionRef dependency : library.getClosure(revision._id, false)) { ResourceDescriptor dep = createResourceDescriptor(dependency); dep.dependency = true; resources.add(dep); } } } return resources; } catch (Exception e) { e.printStackTrace(); return Collections.emptySet(); } } private ResourceDescriptor createResourceDescriptor(RevisionRef ref) throws Exception { ResourceDescriptorImpl rd = new ResourceDescriptorImpl(ref); rd.bsn = ref.bsn; rd.version = toVersion(ref.baseline, ref.qualifier); rd.description = ref.description; rd.id = ref.revision; rd.included = getIndex().getRevisionRef(rd.id) != null; rd.phase = toPhase(ref.phase); return rd; } private Index getIndex() throws Exception { init(); return index; } private aQute.bnd.service.repository.Phase toPhase(aQute.service.library.Library.Phase phase) { switch (phase) { case STAGING : return aQute.bnd.service.repository.Phase.STAGING; case LOCKED : return aQute.bnd.service.repository.Phase.LOCKED; case MASTER : return aQute.bnd.service.repository.Phase.MASTER; case RETIRED : return aQute.bnd.service.repository.Phase.RETIRED; case WITHDRAWN : return aQute.bnd.service.repository.Phase.WITHDRAWN; default : return null; } } // @Override public Set<ResourceDescriptor> query(String query) throws Exception { Set<ResourceDescriptor> resources = new HashSet<ResourceDescriptor>(); RevisionRef master = null; RevisionRef staging = null; for (Program p : library.getQueryPrograms(query, 0, 100)) { for (RevisionRef ref : p.revisions) { if (master == null && ref.phase == Library.Phase.MASTER) { master = ref; } else if (staging != null && ref.phase == Library.Phase.STAGING) { staging = ref; } } if (master != null) resources.add(createResourceDescriptor(master)); if (staging != null) resources.add(createResourceDescriptor(staging)); } return resources; } // @Override public boolean addResource(ResourceDescriptor resource) throws Exception { if (resource instanceof ResourceDescriptorImpl) { RevisionRef ref = ((ResourceDescriptorImpl) resource).revision; if (index.addRevision(ref)) { index.save(); return true; } } return false; } // @Override public Set<ResourceDescriptor> findResources(org.osgi.resource.Requirement requirement, boolean includeDependencies) throws Exception { FilterParser fp = new FilterParser(); aQute.bnd.osgi.resource.FilterParser.Expression expression = fp.parse(requirement.getDirectives().get("filter")); String query = expression.query(); if ( query == null) { return Collections.emptySet(); } return query(query); } /** * Check if there is at least one network interface up and running so we * have internet access. 
*/ private boolean isConnected() throws SocketException { if (offline) return false; try { for (Enumeration<NetworkInterface> e = NetworkInterface.getNetworkInterfaces(); e.hasMoreElements();) { NetworkInterface interf = e.nextElement(); if (!interf.isLoopback() && interf.isUp()) return true; } } catch (SocketException e) { // ignore, we assume we're offline } return false; } /** * @param bsn * @return * @throws Exception */ private Program getProgram(final String bsn, boolean force) throws Exception { Program p = cache.getProgram(bsn); if (p == null || force) { p = library.getProgram(Library.OSGI_GROUP, bsn); if (p != null) cache.putProgram(bsn, p); } return p; } /** * @param sha * @return * @throws Exception */ private Revision getRevision(Coordinate c) throws Exception { return library.getRevisionByCoordinate(c); } public byte[] getDigest() throws Exception { init(); return index.getRevisions()._id; } /** * Ensure that the revisions is updated * * @throws Exception */ byte[] sync() throws Exception { Revisions revisions = index.getRevisions(); if (!index.isSynced()) { reporter.trace("Syncing repo indexes"); library.createRevisions(revisions); index.setSynced(revisions._id); } return revisions._id; } /** * Compare a list of versions against the available versions and return the * desired list. This will remove all staged version that are 'below' a * master. */ public SortedSet<Version> update(SortedSet<Version> input, Program p) throws Exception { Map<Version,Version> mapped = new HashMap<Version,Version>(); for (RevisionRef ref : p.revisions) { Version a = toVersion(ref.baseline, ref.qualifier); Version mask = mask(a); Version highest = mapped.get(mask); if (highest == null || a.compareTo(highest) > 0 || ref.phase == Library.Phase.MASTER) mapped.put(mask, a); } HashSet<Version> output = new HashSet<Version>(); for (Version i : input) { Version mask = mask(i); Version found = mapped.get(mask); if (found != null) output.add(found); else reporter.error("[update] Missing version %s for bsn %s", mask, p.last.bsn); } return new SortedList<Version>(output); } private static Version mask(Version in) { return new Version(in.getMajor(), in.getMinor()); } /** * Remove any unused entries in this repository * * @throws Exception */ void cleanUp() throws Exception { Workspace workspace = registry.getPlugin(Workspace.class); Set<Container> set = new HashSet<Container>(); for (Project project : workspace.getAllProjects()) { set.addAll(project.getBuildpath()); set.addAll(project.getRunbundles()); set.addAll(project.getRunpath()); set.addAll(project.getTestpath()); set.addAll(project.getBootclasspath()); set.addAll(project.getClasspath()); // // This should be replaced with project.getRunfw() // String s = project.getProperty(Constants.RUNFW); List<Container> bundles = project.getBundles(Strategy.HIGHEST, s, Constants.RUNFW); set.addAll(bundles); File base = project.getBase(); for (File sub : base.listFiles()) { if (sub.getName().endsWith(".bndrun")) { Project bndrun = new Project(workspace, base, sub); try { set.addAll(bndrun.getRunbundles()); set.addAll(bndrun.getRunpath()); set.addAll(bndrun.getTestpath()); set.addAll(bndrun.getBootclasspath()); set.addAll(bndrun.getClasspath()); } finally { bndrun.close(); } } } } Set<RevisionRef> refs = new HashSet<RevisionRef>(index.getRevisionRefs()); Set<RevisionRef> keep = new HashSet<RevisionRef>(); for (Container libOrRev : set) { for (Container c : libOrRev.getMembers()) { reporter.trace("Dependency " + c); if (!Verifier.isVersion(c.getVersion())) continue; 
RevisionRef ref = index.getRevisionRef(c.getBundleSymbolicName(), new Version(c.getVersion())); if (ref != null) refs.remove(ref); else { // missing! reporter.trace("Missing " + c.getBundleSymbolicName()); Coordinate coord = new Coordinate(c.getBundleSymbolicName()); Revision rev = library.getRevisionByCoordinate(coord); if (rev != null) { index.addRevision(new RevisionRef(rev)); } else System.out.printf("not found %s\n", c); } keep.add(ref); } } for (RevisionRef ref : refs) { index.delete(ref.bsn, Index.toVersion(ref)); } index.save(); } /** * Get a Resource Descriptor for a given bsn/version * * @param bsn * @param version * @return * @throws Exception */ public ResourceDescriptor getDescriptor(String bsn, Version version) throws Exception { RevisionRef revisionRef = index.getRevisionRef(bsn, version); if (revisionRef == null) return null; return createResourceDescriptor(revisionRef); } /** * Copy a string to the clipboard */ void toClipboard(String bsn, Version base) { Version nextMajor = new Version(base.getMajor() + 1, 0, 0); toClipboard(bsn + ";version='[" + base.getWithoutQualifier() + "," + nextMajor + ")'"); } void toClipboard(String s) { if (s == null) return; StringSelection stringSelection = new StringSelection(s); Clipboard clpbrd = Toolkit.getDefaultToolkit().getSystemClipboard(); clpbrd.setContents(stringSelection, null); } @Override public String toString() { byte[] digest; try { digest = getDigest(); } catch (Exception e) { throw new RuntimeException(e); } return "JpmRepository [writable=" + canWrite() + ", " + (getName() != null ? "name=" + getName() + ", " : "") + (getLocation() != null ? "location=" + getLocation() + ", " : "") + (digest != null ? "digest=" + Hex.toHexString(digest) : "") + "]"; } }
biz.aQute.repository/src/aQute/bnd/jpm/Repository.java
package aQute.bnd.jpm; import static aQute.lib.io.IO.*; import java.awt.*; import java.awt.datatransfer.*; import java.io.*; import java.net.*; import java.util.*; import java.util.List; import java.util.concurrent.*; import java.util.regex.*; import aQute.bnd.build.*; import aQute.bnd.build.Container; import aQute.bnd.osgi.*; import aQute.bnd.osgi.Verifier; import aQute.bnd.osgi.resource.*; import aQute.bnd.service.*; import aQute.bnd.service.repository.*; import aQute.bnd.version.*; import aQute.jpm.facade.repo.*; import aQute.jsonrpc.proxy.*; import aQute.lib.collections.*; import aQute.lib.converter.*; import aQute.lib.hex.*; import aQute.lib.io.*; import aQute.lib.justif.*; import aQute.lib.settings.*; import aQute.libg.cryptography.*; import aQute.libg.glob.*; import aQute.libg.reporter.*; import aQute.rest.urlclient.*; import aQute.service.library.*; import aQute.service.library.Library.Program; import aQute.service.library.Library.Revision; import aQute.service.library.Library.RevisionRef; import aQute.service.reporter.*; /** * A bnd repository based on the jpm4j server. */ public class Repository implements Plugin, RepositoryPlugin, Closeable, Refreshable, Actionable, RegistryPlugin, SearchableRepository, InfoRepository { public static final String REPO_DEFAULT_URI = "http://repo.jpm4j.org/"; private static final PutOptions DEFAULT_OPTIONS = new PutOptions(); private final String DOWN_ARROW = " \u21E9"; protected final DownloadListener[] EMPTY_LISTENER = new DownloadListener[0]; private Pattern SHA = Pattern.compile( "([A-F0-9][a-fA-F0-9]){20,20}", Pattern.CASE_INSENSITIVE); private final Justif j = new Justif(80, new int[] { 20, 28, 36, 44 }); private Settings settings = new Settings(); private boolean canwrite; final MultiMap<File,DownloadListener> queues = new MultiMap<File,RepositoryPlugin.DownloadListener>(); private final Pattern JPM_REVISION_URL_PATTERN = Pattern .compile("https?://.+#!?/p/([^/]+)/([^/]+)/([^/]*)/([^/]+)"); private Options options; Reporter reporter = new ReporterAdapter(System.out); /** * Maintains the index of what we've downloaded so far. */ private File indexFile; private boolean indexRecurse; Index index; private boolean offline; private Registry registry; StoredRevisionCache cache; Set<File> notfound = new HashSet<File>(); private Set<String> notfoundref = new HashSet<String>(); final Semaphore limitDownloads = new Semaphore(12); private JpmRepo library; private String depositoryGroup; private String depositoryName; private URLClient urlc; private String location; private URLClient depository; private String email; private String name; URI url; /** * Reports downloads but does never block on them. This is a best effort, if * it fails, we can still get them later. */ class LocalDownloadListener implements DownloadListener { @Override public void success(File file) throws Exception { reporter.trace("downloaded %s", file); } @Override public void failure(File file, String reason) throws Exception { reporter.trace("failed to downloaded %s", file); } @Override public boolean progress(File file, int percentage) throws Exception { reporter.trace("Downloadedin %s %s%", file, percentage); return true; } } interface Options { /** * The URL to the remote repository. Default is http://repo.jpm4j.org * * @return */ URI url(); /** * The group of a depository,optional. 
* * @return */ String depository_group(); /** * The name of the depository * * @return */ String depository_name(); /** * The email address of the user * * @return */ String email(); /** * Where the index file is stored. The default should reside in the * workspace and be part of the scm * * @return */ String index(); /** * The cache location, default is ~/.bnd/cache. This file is relative * from the users home directory if not absolute. * * @return */ String location(); /** * Set the settings */ String settings(); /** * The name of the repo * * @return */ String name(); /** * Fetch dependencies automatically */ boolean recurse(); boolean trace(); } /** * Get a revision. */ @Override public File get(String bsn, Version version, Map<String,String> attrs, final DownloadListener... listeners) throws Exception { init(); // Check if we're supposed to have this RevisionRef resource = index.getRevisionRef(bsn, version); if (resource == null) return null; else return getLocal(resource, attrs, listeners); } /** * The index indicates we're allowed to have this one. So check if we have * it cached or if we need to download it. */ private File getLocal(RevisionRef resource, Map<String,String> attrs, DownloadListener... downloadListeners) throws Exception { File sources = cache.getPath(resource.bsn, Index.toVersion(resource).toString(), resource.revision, true); if (sources.isFile()) { for (DownloadListener dl : downloadListeners) { dl.success(sources); } return sources; } File file = cache.getPath(resource.bsn, Index.toVersion(resource).toString(), resource.revision); scheduleDownload(file, resource.revision, resource.size, resource.urls, downloadListeners); return file; } /** * Schedule a download, handling the listeners * * @param url */ private void scheduleDownload(final File file, final byte[] sha, final long size, final Set<URI> urls, DownloadListener... listeners) throws Exception { synchronized (notfound) { if (notfound.contains(file)) { failure(listeners, file, "Not found"); return; } } if (file.isFile()) { if (file.length() == size) { // Already exists, done success(listeners, file); reporter.trace("was in cache"); return; } reporter.error("found file but of different length %s, will refetch", file); } else { reporter.trace("not in cache %s", file + " " + queues); } // Check if we need synchronous if (listeners.length == 0) { reporter.trace("in cache, no listeners"); cache.download(file, urls, sha); return; } // // With download listeners we need to be careful to queue them // appropriately. Don't want to download n times because // requests arrive during downloads. // synchronized (queues) { List<DownloadListener> list = queues.get(file); boolean first = list == null || list.isEmpty(); for (DownloadListener l : listeners) { queues.add(file, l); } if (!first) { // return, file is being downloaded by another and that // other will signal the download listener. 
reporter.trace("someone else is downloading our file " + queues.get(file)); return; } } try { reporter.trace("starting thread for " + file); // Limit the total downloads going on at the same time limitDownloads.acquire(); Thread t = new Thread("Downloading " + file) { public void run() { try { reporter.trace("downloading in background " + file); cache.download(file, urls, sha); success(queues.get(file).toArray(EMPTY_LISTENER), file); } catch (FileNotFoundException e) { synchronized (notfound) { reporter.error("Not found %s", e, file); notfound.add(file); } synchronized (queues) { failure(queues.get(file).toArray(EMPTY_LISTENER), file, e.toString()); } } catch (Throwable e) { e.printStackTrace(); reporter.error("failed to download %s: %s", e, file); synchronized (queues) { failure(queues.get(file).toArray(EMPTY_LISTENER), file, e.toString()); } } finally { synchronized (queues) { queues.remove(file); } reporter.trace("downloaded " + file); // Allow other downloads to start limitDownloads.release(); } } }; t.start(); } catch (Exception e) { // Is very unlikely to happen but we must ensure the // listeners are called and we're at the head of the queue reporter.error("Starting a download for %s failed %s", file, e); synchronized (queues) { failure(queues.get(file).toArray(EMPTY_LISTENER), file, e.toString()); queues.remove(file); } } } /** * API method */ @Override public boolean canWrite() { return canwrite; } /** * Put an artifact in the repo */ @Override public PutResult put(InputStream in, PutOptions options) throws Exception { if (!canwrite) throw new UnsupportedOperationException( "This is not a writeable repo, s" + "et depository.group, depository.name and properties and ensure the email property is in your global settings"); assert in != null; assert depositoryGroup != null; assert depositoryName != null; init(); if (options == null) options = DEFAULT_OPTIONS; reporter.trace("syncing"); sync(); File file = File.createTempFile("put", ".jar"); file.deleteOnExit(); try { reporter.trace("creating tmp copy"); copy(in, file); if (depository == null) { URI url = library.depository(depositoryGroup, depositoryName); reporter.trace("send to url " + url); depository = new URLClient(url.toString()); setCredentials(depository); reporter.trace("credentials " + depository); } byte[] digest = options.digest == null ? SHA1.digest(file).digest() : options.digest; String path = Hex.toHexString(digest); reporter.trace("putting " + path); Library.RevisionRef d = depository.put(path, file, Library.RevisionRef.class, null); if (d == null) { reporter.error("Cant deposit %s", file); return null; } if (!Arrays.equals(digest, d.revision)) throw new Exception("Invalid digest"); // Copy it to our cache cache.add(d, file); index.addRevision(d); index.save(); // Coordinator PutResult putr = new PutResult(); putr.artifact = depository.getUri(path); putr.digest = digest; return putr; } catch (Exception e) { e.printStackTrace(); throw e; } finally { file.delete(); } } /** * If we have no search or an empty search we list our index. Otherwise we * query remotely. 
*/ Pattern COMMAND_P = Pattern.compile("^([^/]*)/(!?[lmsprw])([^/]*)$"); @Override public List<String> list(String query) throws Exception { init(); Set<String> bsns = new HashSet<String>(); if (query == null || query.trim().isEmpty()) query = "*"; else query = query.trim(); Library.Phase phase = null; boolean negated = false; Matcher m = COMMAND_P.matcher(query); if (m.matches()) { query = m.group(1) + m.group(3); String cmd = m.group(2); if (cmd.startsWith("!")) { negated = true; cmd = cmd.substring(1); } char c = Character.toLowerCase(cmd.charAt(0)); switch (c) { case 'l' : phase = Library.Phase.LOCKED; break; case 'p' : phase = Library.Phase.PENDING; break; case 's' : phase = Library.Phase.STAGING; break; case 'm' : phase = Library.Phase.MASTER; break; case 'r' : phase = Library.Phase.RETIRED; break; case 'w' : phase = Library.Phase.WITHDRAWN; break; } reporter.trace("Phase is " + c + " " + phase); } Glob glob = null; try { glob = new Glob(query); } catch (Exception e) { glob = new Glob("*"); } bsn: for (String bsn : index.getBsns()) { if (glob.matcher(bsn).matches()) { if (phase != null) { boolean hasPhase = false; revision: for (Version version : index.getVersions(bsn)) { RevisionRef ref = index.getRevisionRef(bsn, version); if (ref.phase == phase) { hasPhase = true; break revision; } } if (hasPhase == negated) continue bsn; } bsns.add(bsn); } } List<String> result = new ArrayList<String>(bsns); Collections.sort(result); return result; } /** * List the versions belonging to a bsn */ @Override public SortedSet<Version> versions(String bsn) throws Exception { init(); SortedSet<Version> versions = index.getVersions(bsn); if (!versions.isEmpty() || !index.isLearning()) { return versions; } return versions; } /* * Convert a baseline/qualifier to a version */ static Version toVersion(String baseline, String qualifier) { if (qualifier == null || qualifier.isEmpty()) return new Version(baseline); else return new Version(baseline + "." + qualifier); } /* * Return if bsn is a SHA */ private boolean isSha(String bsn) { return SHA.matcher(bsn).matches(); } @Override public String getName() { return name == null ? 
"jpm4j" : name; } @Override public void setProperties(Map<String,String> map) { reporter.trace("CLs " + getClass().getClassLoader() + " " + URLClient.class.getClassLoader()); try { options = Converter.cnv(Options.class, map); setOptions(options); } catch (Exception e) { throw new RuntimeException(e); } } public void setOptions(Options options) { try { location = options.location(); if (location == null) location = "~/.bnd/shacache"; this.name = options.name(); if (options.settings() != null) { settings = new Settings(options.settings()); } email = options.email(); if (email == null) email = settings.getEmail(); url = options.url(); if (url == null) url = new URI(REPO_DEFAULT_URI); urlc = new URLClient(url.toString()); if (email != null && !email.contains("anonymous")) setCredentials(urlc); urlc.setReporter(reporter); File cacheDir = IO.getFile(IO.home, location); cacheDir.mkdirs(); if (!cacheDir.isDirectory()) throw new IllegalArgumentException("Not able to create cache directory " + cacheDir); String indexPath = options.index(); if (indexPath == null) throw new IllegalArgumentException("Index file not set (index) "); indexFile = IO.getFile(indexPath); if (indexFile.isDirectory()) throw new IllegalArgumentException("Index file is a directory instead of a file " + indexFile.getAbsolutePath()); indexRecurse = options.recurse(); cache = new StoredRevisionCache(cacheDir, settings); library = JSONRPCProxy.createRPC(JpmRepo.class, urlc, "jpm"); if (options.index() == null) throw new IllegalArgumentException("Index file not set"); canwrite = false; if (options.depository_group() != null) { depositoryGroup = options.depository_group(); depositoryName = options.depository_name(); if (depositoryName == null) depositoryName = "home"; canwrite = email != null; } } catch (Exception e) { if (reporter != null) reporter.exception(e, "Creating options"); throw new RuntimeException(e); } } private void setCredentials(URLClient urlc) throws UnknownHostException, Exception { urlc.credentials(email, InetAddress.getLocalHost().getHostName(), settings.getPublicKey(), settings.getPrivateKey()); } @Override public void setReporter(Reporter processor) { reporter = processor; if (index != null) index.setReporter(reporter); if (urlc != null) urlc.setReporter(processor); } @Override public boolean refresh() throws Exception { index = new Index(indexFile); index.setRecurse(indexRecurse); cache.refresh(); notfound.clear(); notfoundref.clear(); return true; } /** * Return the actions for this repository */ @Override public Map<String,Runnable> actions(Object... target) throws Exception { init(); boolean connected = isConnected(); if (target == null) return null; if (target.length == 0) return getRepositoryActions(); final String bsn = (String) target[0]; Program careful = null; if (connected) try { careful = getProgram(bsn, true); } catch (Exception e) { reporter.error("Offline? 
%s", e); } final Program p = careful; if (target.length == 1) return getProgramActions(bsn, p); if (target.length >= 2) { final Version version = (Version) target[1]; return getRevisionActions(p, bsn, version); } return null; } /** * @param p * @param bsn * @param version * @return * @throws Exception */ static Pattern JAR_FILE_P = Pattern.compile("(https?:.+)(\\.jar)"); private Map<String,Runnable> getRevisionActions(final Program program, final String bsn, final Version version) throws Exception { final Library.RevisionRef resource = index.getRevisionRef(bsn, version); Map<String,Runnable> map = new LinkedHashMap<String,Runnable>(); map.put("Inspect Revision", new Runnable() { public void run() { open(url + "#!/p/sha/" + Hex.toHexString(resource.revision) + "//0.0.0"); } }); map.put("Copy reference", new Runnable() { @Override public void run() { toClipboard(bsn, version); } }); Runnable doUpdate = getUpdateAction(program, resource); if (doUpdate != null) { map.put("Update to " + doUpdate, doUpdate); } else { map.put("-Update", null); } map.put("Delete", new Runnable() { public void run() { try { delete(bsn, version, true); } catch (Exception e) { e.printStackTrace(); } } }); if (isConnected()) { final File sourceFile = cache.getPath(bsn, version.toString(), resource.revision, true); Runnable run = null; if (!sourceFile.isFile()) { URL sourceURI = null; for (URI uri : resource.urls) { try { Matcher m = JAR_FILE_P.matcher(uri.toString()); if (m.matches()) { String stem = m.group(1); URL src = new URL(stem + "-sources.jar"); HttpURLConnection conn = (HttpURLConnection) src.openConnection(); conn.setRequestMethod("HEAD"); if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) { sourceURI = src; continue; } } } catch (Exception e) { // ignore } } if (sourceURI != null) { run = createAddSourceAction(bsn, version, resource, sourceFile, sourceURI); } } else reporter.trace("sources in %s", sourceFile); if (run != null) map.put("Add Sources", run); else map.put("-Add Sources", null); } if (cache.hasSources(bsn, version.toString(), resource.revision)) { map.put("Remove Sources", new Runnable() { @Override public void run() { cache.removeSources(bsn, version.toString(), resource.revision); } }); } return map; } /** * @param bsn * @param version * @param resource * @param withSources * @param src * @return */ protected Runnable createAddSourceAction(final String bsn, final Version version, final Library.RevisionRef resource, final File withSources, final URL src) { Runnable run; run = new Runnable() { public void run() { try { // Sync downloads so that we do not assume the // binary is already there ... so call without // listeners. get(bsn, version, null); File file = cache.getPath(bsn, version.toString(), resource.revision); Jar binary = new Jar(file); try { Jar sources = new Jar(src.getFile(), src.openStream()); binary.setDoNotTouchManifest(); try { binary.addAll(sources, null, "OSGI-OPT/src"); binary.write(withSources); } finally { sources.close(); } } finally { binary.close(); } } catch (Exception e) { throw new RuntimeException(e); } } }; return run; } /** * @param bsn * @param version * @param resource * @param withSources * @param src * @return */ protected Runnable createRemoveSourceAction(final String bsn, final Version version, final Library.RevisionRef resource, final File withSources, final URL src) { Runnable run; run = new Runnable() { public void run() { try { // Sync downloads so that we do not assume the // binary is already there ... so call without // listeners. 
get(bsn, version, null); File file = cache.getPath(bsn, version.toString(), resource.revision); Jar binary = new Jar(file); try { Jar sources = new Jar(src.getFile(), src.openStream()); try { binary.addAll(sources, null, "OSGI-OPT/src"); binary.write(withSources); } finally { sources.close(); } } finally { binary.close(); } } catch (Exception e) { throw new RuntimeException(e); } } }; return run; } /** * @param bsn * @param p * @return * @throws Exception */ private Map<String,Runnable> getProgramActions(final String bsn, final Program p) throws Exception { Map<String,Runnable> map = new LinkedHashMap<String,Runnable>(); if (p != null) { map.put("Inspect Program", new Runnable() { public void run() { open(url + "#!/p/osgi/" + bsn); } }); final SortedSet<Version> versions = index.getVersions(bsn); if (versions.isEmpty()) map.put("-Copy reference", null); else map.put("Copy reference", new Runnable() { @Override public void run() { toClipboard(bsn, versions.first()); } }); RevisionRef ref = p.revisions.get(0); Version latest = toVersion(ref.baseline, ref.qualifier); for (Version v : index.getVersions(bsn)) { if (v.equals(latest)) { latest = null; break; } } final Version l = latest; String title = "Get Latest"; if (latest == null) title = "-" + title; else title += " " + l + ref.phase; map.put(title, new Runnable() { public void run() { try { add(bsn, l); } catch (Exception e) { throw new RuntimeException(e); } } }); Runnable updateAction = getUpdateAction(p, bsn); if (updateAction != null) map.put("Update " + updateAction, updateAction); else map.put("-Update", null); } else { map.put("-Update (offline)", null); } map.put("Delete", new Runnable() { public void run() { try { delete(bsn); } catch (Exception e) { throw new RuntimeException(e); } } }); return map; } /** * @return * @throws Exception */ private Map<String,Runnable> getRepositoryActions() throws Exception { Map<String,Runnable> map = new LinkedHashMap<String,Runnable>(); map.put("Inspect", new Runnable() { public void run() { try { byte[] revisions = sync(); open(url + "#!/revisions/" + Hex.toHexString(revisions)); } catch (Exception e) { throw new RuntimeException(e); } } }); map.put("Delete Cache", new Runnable() { @Override public void run() { try { cache.deleteAll(); } catch (Exception e) { reporter.error("Deleting cache %s", e); } } }); map.put("Refresh", new Runnable() { @Override public void run() { try { refresh(); } catch (Exception e) { reporter.error("Refreshing %s", e); } } }); map.put("Update All", new Runnable() { @Override public void run() { try { updateAll(); } catch (Exception e) { reporter.error("Update all %s", e); } } }); map.put("Download All", new Runnable() { @Override public void run() { try { DownloadListener dl = new DownloadListener() { @Override public void success(File file) throws Exception { reporter.trace("downloaded %s", file); } @Override public void failure(File file, String reason) throws Exception { reporter.trace("failed to download %s becasue %s", file, reason); } @Override public boolean progress(File file, int percentage) throws Exception { reporter.progress(((float) percentage) / 100, "downloading %s", file); return true; } }; for (String bsn : list(null)) { for (Version v : versions(bsn)) { get(bsn, v, null, dl); } } } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } }); map.put("Remove unused/Add missing", new Runnable() { @Override public void run() { try { cleanUp(); } catch (Exception e) { e.printStackTrace(); } } }); String title = "Learning{Unknown 
resources are an error, select to learn}"; if (index.isLearning()) { title = "!Learning{Will attempt to fetch unknown resources, select to make this an error}"; } map.put(title, new Runnable() { @Override public void run() { try { index.setLearning(!index.isLearning()); index.save(); } catch (Exception e) { reporter.error("Learning %s", e); } } }); title = "Recurse{Do not fetch dependencies automatically}"; if (index.isRecurse()) { title = "!Recurse{Fetch dependencies automatically}"; } map.put(title, new Runnable() { @Override public void run() { try { index.setRecurse(!index.isRecurse()); index.save(); } catch (Exception e) { reporter.error("Learning %s", e); } } }); return map; } @Override public String tooltip(Object... target) throws Exception { init(); if (target == null || target.length == 0) return repositoryTooltip(); if (target.length == 1) return programTooltip((String) target[0]); if (target.length == 2) return revisionTooltip((String) target[0], (Version) target[1]); return "Hmm, have no idea on what object you want a tooltip ..."; } private String repositoryTooltip() throws Exception { Formatter f = new Formatter(); try { f.format("%s\n", this); if (depositoryGroup != null && depositoryName != null) { f.format("\n[Depository]\n"); f.format("Group: %s\n", depositoryGroup); f.format("Depository: %s\n", depositoryName); f.format("Email: %s\n", email); f.format("Writable: %s %s\n", canwrite, (email == null ? "(no email set, see 'bnd settings email=...')" : "")); f.format("Public key: %s…\n", Hex.toHexString(settings.getPublicKey()).substring(0, 16)); } f.format("\n[Files]\nCache location %s\n", options.location()); f.format("Index file %s\n", options.index()); f.format("Number of bsns %s\n", index.getBsns().size()); f.format("Number of revs %s\n", index.getRevisionRefs().size()); f.format("Dirty %s\n", index.isDirty()); return f.toString(); } finally { f.close(); } } private String programTooltip(String bsn) throws Exception { Program p = getProgram(bsn, false); if (p != null) { Formatter sb = new Formatter(); try { if (p.wiki != null && p.wiki.text != null) sb.format("%s\n", p.wiki.text.replaceAll("#\\s?", "")); else if (p.last.description != null) sb.format("%s\n", p.last.description); else sb.format("No description\n"); j.wrap((StringBuilder) sb.out()); return sb.toString(); } finally { sb.close(); } } return null; } private String revisionTooltip(String bsn, Version version) throws Exception { RevisionRef r = getRevisionRef(bsn, version); if (r == null) return null; Formatter sb = new Formatter(); try { sb.format("[%s:%s", r.groupId, r.artifactId); if (r.classifier != null) { sb.format(":%s", r.classifier); } sb.format("@%s] %s\n\n", r.version, r.phase); if (r.releaseSummary != null) sb.format("%s\n\n", r.releaseSummary); if (r.description != null) sb.format("%s\n\n", r.description.replaceAll("#\\s*", "")); sb.format("Size: %s\n", size(r.size, 0)); sb.format("SHA-1: %s\n", Hex.toHexString(r.revision)); sb.format("Age: %s\n", age(r.created)); sb.format("URL: %s\n", r.urls); File f = cache.getPath(bsn, version.toString(), r.revision); if (f.isFile() && f.length() == r.size) sb.format("Cached %s\n", f); else sb.format("Not downloaded\n"); Program p = getProgram(bsn, false); if (p != null) { Runnable update = getUpdateAction(p, r); if (update != null) { sb.format(DOWN_ARROW + " This version can be updated to " + update); } } File sources = cache.getPath(bsn, version.toString(), r.revision, true); if (sources.isFile()) sb.format("Has sources: %s\n", sources.getAbsolutePath()); 
else sb.format("No sources"); j.wrap((StringBuilder) sb.out()); return sb.toString(); } finally { sb.close(); } } private List<RevisionRef> getRevisionRefs(String bsn) throws Exception { String classifier = null; String parts[] = bsn.split("__"); if (parts.length == 3) { bsn = parts[0] + "__" + parts[1]; classifier = parts[2]; } Program program = getProgram(bsn, false); if (program != null) { List<RevisionRef> refs = new ArrayList<Library.RevisionRef>(); for (RevisionRef r : program.revisions) { if (eq(classifier, r.classifier)) refs.add(r); } return refs; } return Collections.emptyList(); } /** * Find a revisionref for a bsn/version * * @param bsn * @param version * @return * @throws Exception */ private RevisionRef getRevisionRef(String bsn, Version version) throws Exception { // Handle when we have a sha reference String id = bsn + "-" + version; if (notfoundref.contains(id)) return null; if (isSha(bsn) && version.equals(Version.LOWEST)) { Revision r = getRevision(new Coordinate(bsn)); if (r == null) return null; return new RevisionRef(r); } reporter.trace("Looking for %s-%s", bsn, version); for (RevisionRef r : getRevisionRefs(bsn)) { Version v = toVersion(r.baseline, r.qualifier); if (v.equals(version)) return r; } notfoundref.add(id); return null; } private boolean eq(String a, String b) { if (a == null) a = ""; if (b == null) b = ""; return a.equals(b); } private String age(long created) { if (created == 0) return "unknown"; long diff = (System.currentTimeMillis() - created) / (1000 * 60 * 60); if (diff < 48) return diff + " hours"; diff /= 24; if (diff < 14) return diff + " days"; diff /= 7; if (diff < 8) return diff + " weeks"; diff /= 4; if (diff < 24) return diff + " months"; diff /= 12; return diff + " years"; } String[] sizes = { "bytes", "Kb", "Mb", "Gb", "Tb", "Pb", "Showing off?" }; private String size(long size, int power) { if (power >= sizes.length) return size + " Pb"; if (size < 1000) return size + sizes[power]; return size(size / 1000, power + 1); } /** * Update all bsns * * @throws Exception */ void updateAll() throws Exception { for (String bsn : index.getBsns()) { update(bsn); } } /** * Update all baselines for a bsn * * @param bsn * @throws Exception */ void update(String bsn) throws Exception { Program program = getProgram(bsn, false); Runnable updateAction = getUpdateAction(program, bsn); if (updateAction == null) return; reporter.trace("update bsn %s", updateAction); updateAction.run(); } /** * Update a bsn * * @throws Exception */ Runnable getUpdateAction(Program program, String bsn) throws Exception { final Set<Runnable> update = new TreeSet<Runnable>(); for (Version v : index.getVersions(bsn)) { RevisionRef resource = index.getRevisionRef(bsn, v); Runnable updateAction = getUpdateAction(program, resource); if (updateAction != null) update.add(updateAction); } if (update.isEmpty()) return null; return new Runnable() { @Override public void run() { for (Runnable r : update) { r.run(); } } @Override public String toString() { return update.toString(); } }; } /** * Find a RevisionRef from the Program. We are looking for a version with * the same baseline but a higher qualifier or different phase. 
* * @param p * @param currentVersion * @return * @throws Exception */ private Runnable getUpdateAction(Program program, final RevisionRef current) throws Exception { RevisionRef candidateRef = null; Version candidate = toVersion(current.baseline, current.qualifier); for (RevisionRef r : program.revisions) { Version refVersion = toVersion(r.baseline, r.qualifier); if (eq(r.classifier, current.classifier)) { if (refVersion.compareTo(candidate) >= 0) { candidate = refVersion; candidateRef = r; } } } if (candidateRef == null) // // We're not present anymore, should never happen ... // return new Runnable() { @Override public void run() { try { index.delete(current.bsn, toVersion(current.baseline, current.qualifier)); } catch (Exception e) { throw new RuntimeException(e); } } public String toString() { return "[delete]"; } }; // // Check if we are not same revision // if (!candidateRef.version.equals(current.version)) { final RevisionRef toAdd = candidateRef; return new Runnable() { // // Replace the current version // public void run() { try { index.delete(current.bsn, toVersion(current.baseline, current.qualifier)); index.addRevision(toAdd); index.save(); } catch (Exception e) { throw new RuntimeException(e); } } public String toString() { return toAdd.version; } }; } // // So now we are the same, check if the phase has changed // if (candidateRef.phase != current.phase) { final RevisionRef toChange = candidateRef; return new Runnable() { @Override public void run() { try { index.delete(current.bsn, toVersion(current.baseline, current.qualifier)); index.addRevision(toChange); index.save(); } catch (Exception e) { throw new RuntimeException(e); } } public String toString() { return "-> " + toChange.phase; } }; } return null; } public void setIndex(File index) { indexFile = index; } void success(DownloadListener[] downloadListeners, File f) { for (DownloadListener l : downloadListeners) { try { l.success(f); } catch (Exception e) { e.printStackTrace(); } } } void failure(DownloadListener[] listeners, File f, String reason) { for (DownloadListener l : listeners) { try { l.failure(f, reason); } catch (Exception e) { e.printStackTrace(); } } } @Override public String title(Object... 
target) throws Exception { init(); if (target == null || target.length == 0) return getName(); if (target.length == 1 && target[0] instanceof String) { String bsn = (String) target[0]; String title = bsn; return title; } if (target.length == 2 && target[0] instanceof String && target[1] instanceof Version) { String bsn = (String) target[0]; Version version = (Version) target[1]; Library.RevisionRef resource = index.getRevisionRef(bsn, version); if (resource == null) return "[deleted " + version + "]"; String title = getPhase(resource.phase.toString()) + " " + version.toString(); File path = cache.getPath(bsn, version.toString(), resource.revision); if (path.isFile() && path.length() == resource.size) { title += DOWN_ARROW; } if (cache.getPath(bsn, version.toString(), resource.revision, true).isFile()) title += "+"; return title; } return null; } // Temp until we fixed bnd in bndtools enum Phase { STAGING(false, false, false, "[s]"), LOCKED(true, false, false, "[l]"), MASTER(true, true, true, "[m]"), RETIRED( true, false, true, "[r]"), WITHDRAWN(true, false, true, "[x]"), UNKNOWN(true, false, false, "[?]"); boolean locked; boolean listable; boolean permanent; final String symbol; private Phase(boolean locked, boolean listable, boolean permanent, String symbol) { this.locked = locked; this.listable = listable; this.permanent = permanent; this.symbol = symbol; } public boolean isLocked() { return locked; } public boolean isListable() { return listable; } public boolean isPermanent() { return permanent; } public String getSymbol() { return symbol; } } private String getPhase(String phase) { try { return Phase.valueOf(phase).getSymbol(); } catch (Exception e) { return "?"; } } @Override public File getRoot() { return cache.getRoot(); } @Override public void close() throws IOException {} @Override public String getLocation() { return options.location(); } protected void fireBundleAdded(File file) throws IOException { if (registry == null) return; List<RepositoryListenerPlugin> listeners = registry.getPlugins(RepositoryListenerPlugin.class); if (listeners.isEmpty()) return; Jar jar = new Jar(file); try { for (RepositoryListenerPlugin listener : listeners) { try { listener.bundleAdded(this, jar, file); } catch (Exception e) { reporter.error("Repository listener threw an unexpected exception: %s", e, e); } finally {} } } finally { jar.close(); } } @Override public void setRegistry(Registry registry) { this.registry = registry; } private void init() throws Exception { if (index == null) { reporter.trace("init " + indexFile); index = new Index(indexFile); index.setRecurse(indexRecurse); index.setReporter(reporter); } } public void add(String bsn, Version version) throws Exception { reporter.trace("Add %s %s", bsn, version); RevisionRef ref = getRevisionRef(bsn, version); add(ref); } void add(RevisionRef ref) throws Exception { // Cleanup existing versions // We remove everything between [mask(v), v) Version newVersion = toVersion(ref.baseline, ref.qualifier); reporter.trace("New version " + ref.bsn + " " + newVersion); Version newMask = mask(newVersion); List<Version> toBeDeleted = new ArrayList<Version>(); for (Version existingVersion : index.getVersions(ref.bsn)) { Version existingMask = mask(existingVersion); if (newMask.equals(existingMask)) { reporter.trace("delete %s-%s", ref.bsn, existingVersion); toBeDeleted.add(existingVersion); } } for (Version v : toBeDeleted) index.delete(ref.bsn, v); reporter.trace("add %s-%s", ref.bsn, newVersion); index.addRevision(ref); getLocal(ref, null, new 
LocalDownloadListener()); if (index.isRecurse()) { Iterable<RevisionRef> refs = getClosure(ref); for (RevisionRef r : refs) { index.addRevision(r); getLocal(ref, null, new LocalDownloadListener()); } } index.save(indexFile); } /** * @param ref * @return * @throws Exception */ private Iterable<RevisionRef> getClosure(RevisionRef ref) throws Exception { return library.getClosure(ref.revision, false); } public void delete(String bsn, Version version, boolean immediate) throws Exception { reporter.trace("Delete %s %s", bsn, version); Library.RevisionRef resource = index.getRevisionRef(bsn, version); if (resource != null) { boolean removed = index.delete(bsn, version); reporter.trace("Was present " + removed); index.save(); } else reporter.trace("No such resource"); } public void delete(String bsn) throws Exception { reporter.trace("Delete %s", bsn); Set<Version> set = new HashSet<Version>(index.getVersions(bsn)); reporter.trace("Versions %s", set); for (Version version : set) { delete(bsn, version, true); } } public boolean dropTarget(URI uri) throws Exception { try { init(); reporter.trace("dropTarget " + uri); Matcher m = JPM_REVISION_URL_PATTERN.matcher(uri.toString()); if (!m.matches()) { reporter.trace("not a proper url to drop " + uri); return false; } Revision revision = getRevision(new Coordinate(m.group(1), m.group(2), m.group(3), m.group(4))); if (revision == null) { reporter.error("no revision found for %s", uri); return false; } Library.RevisionRef resource = index.getRevisionRef(revision._id); if (resource != null) { reporter.trace("resource already loaded " + uri); return true; } RevisionRef ref = new RevisionRef(revision); reporter.trace("adding revision " + ref); add(ref); return true; } catch (Exception e) { e.printStackTrace(); throw e; } } /* * A utility to open a URL on different OS's browsers * @param url the url to open * @throws IOException */ void open(String url) { try { try { Desktop desktop = Desktop.getDesktop(); desktop.browse(new URI(url)); return; } catch (Throwable e) { } String os = System.getProperty("os.name").toLowerCase(); Runtime rt = Runtime.getRuntime(); if (os.indexOf("mac") >= 0 || os.indexOf("darwin") >= 0) { rt.exec("open " + url); } else if (os.indexOf("win") >= 0) { // this doesn't support showing urls in the form of // "page.html#nameLink" rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.indexOf("nix") >= 0 || os.indexOf("nux") >= 0) { // Do a best guess on unix until we get a platform independent // way // Build a list of browsers to try, in this order. String[] browsers = { "epiphany", "firefox", "mozilla", "konqueror", "netscape", "opera", "links", "lynx" }; // Build a command string which looks like // "browser1 "url" || browser2 "url" ||..." StringBuffer cmd = new StringBuffer(); for (int i = 0; i < browsers.length; i++) cmd.append((i == 0 ? 
"" : " || ") + browsers[i] + " \"" + url + "\" "); rt.exec(new String[] { "sh", "-c", cmd.toString() }); } else reporter.trace("Open " + url); } catch (Exception e) { throw new RuntimeException(e); } } /** * Answer the resource descriptors from a URL */ // @Override public Set<ResourceDescriptor> getResources(URI url, boolean includeDependencies) throws Exception { try { Matcher m = JPM_REVISION_URL_PATTERN.matcher(url.toString()); if (!m.matches()) { return null; } Set<ResourceDescriptor> resources = new HashSet<ResourceDescriptor>(); Revision revision = getRevision(new Coordinate(m.group(1), m.group(2), m.group(3), m.group(4))); if (revision != null) { ResourceDescriptor rd = createResourceDescriptor(new RevisionRef(revision)); resources.add(rd); if (includeDependencies) { for (RevisionRef dependency : library.getClosure(revision._id, false)) { ResourceDescriptor dep = createResourceDescriptor(dependency); dep.dependency = true; resources.add(dep); } } } return resources; } catch (Exception e) { e.printStackTrace(); return Collections.emptySet(); } } private ResourceDescriptor createResourceDescriptor(RevisionRef ref) throws Exception { ResourceDescriptorImpl rd = new ResourceDescriptorImpl(ref); rd.bsn = ref.bsn; rd.version = toVersion(ref.baseline, ref.qualifier); rd.description = ref.description; rd.id = ref.revision; rd.included = getIndex().getRevisionRef(rd.id) != null; rd.phase = toPhase(ref.phase); return rd; } private Index getIndex() throws Exception { init(); return index; } private aQute.bnd.service.repository.Phase toPhase(aQute.service.library.Library.Phase phase) { switch (phase) { case STAGING : return aQute.bnd.service.repository.Phase.STAGING; case LOCKED : return aQute.bnd.service.repository.Phase.LOCKED; case MASTER : return aQute.bnd.service.repository.Phase.MASTER; case RETIRED : return aQute.bnd.service.repository.Phase.RETIRED; case WITHDRAWN : return aQute.bnd.service.repository.Phase.WITHDRAWN; default : return null; } } // @Override public Set<ResourceDescriptor> query(String query) throws Exception { Set<ResourceDescriptor> resources = new HashSet<ResourceDescriptor>(); RevisionRef master = null; RevisionRef staging = null; for (Program p : library.getQueryPrograms(query, 0, 100)) { for (RevisionRef ref : p.revisions) { if (master == null && ref.phase == Library.Phase.MASTER) { master = ref; } else if (staging != null && ref.phase == Library.Phase.STAGING) { staging = ref; } } if (master != null) resources.add(createResourceDescriptor(master)); if (staging != null) resources.add(createResourceDescriptor(staging)); } return resources; } // @Override public boolean addResource(ResourceDescriptor resource) throws Exception { if (resource instanceof ResourceDescriptorImpl) { RevisionRef ref = ((ResourceDescriptorImpl) resource).revision; if (index.addRevision(ref)) { index.save(); return true; } } return false; } // @Override public Set<ResourceDescriptor> findResources(org.osgi.resource.Requirement requirement, boolean includeDependencies) throws Exception { FilterParser fp = new FilterParser(); aQute.bnd.osgi.resource.FilterParser.Expression expression = fp.parse(requirement.getDirectives().get("filter")); String query = expression.query(); if ( query == null) { return Collections.emptySet(); } return query(query); } /** * Check if there is at least one network interface up and running so we * have internet access. 
*/ private boolean isConnected() throws SocketException { if (offline) return false; try { for (Enumeration<NetworkInterface> e = NetworkInterface.getNetworkInterfaces(); e.hasMoreElements();) { NetworkInterface interf = e.nextElement(); if (!interf.isLoopback() && interf.isUp()) return true; } } catch (SocketException e) { // ignore, we assume we're offline } return false; } /** * @param bsn * @return * @throws Exception */ private Program getProgram(final String bsn, boolean force) throws Exception { Program p = cache.getProgram(bsn); if (p == null || force) { p = library.getProgram(Library.OSGI_GROUP, bsn); if (p != null) cache.putProgram(bsn, p); } return p; } /** * @param sha * @return * @throws Exception */ private Revision getRevision(Coordinate c) throws Exception { return library.getRevisionByCoordinate(c); } public byte[] getDigest() throws Exception { init(); return index.getRevisions()._id; } /** * Ensure that the revisions is updated * * @throws Exception */ byte[] sync() throws Exception { Revisions revisions = index.getRevisions(); if (!index.isSynced()) { reporter.trace("Syncing repo indexes"); library.createRevisions(revisions); index.setSynced(revisions._id); } return revisions._id; } /** * Compare a list of versions against the available versions and return the * desired list. This will remove all staged version that are 'below' a * master. */ public SortedSet<Version> update(SortedSet<Version> input, Program p) throws Exception { Map<Version,Version> mapped = new HashMap<Version,Version>(); for (RevisionRef ref : p.revisions) { Version a = toVersion(ref.baseline, ref.qualifier); Version mask = mask(a); Version highest = mapped.get(mask); if (highest == null || a.compareTo(highest) > 0 || ref.phase == Library.Phase.MASTER) mapped.put(mask, a); } HashSet<Version> output = new HashSet<Version>(); for (Version i : input) { Version mask = mask(i); Version found = mapped.get(mask); if (found != null) output.add(found); else reporter.error("[update] Missing version %s for bsn %s", mask, p.last.bsn); } return new SortedList<Version>(output); } private static Version mask(Version in) { return new Version(in.getMajor(), in.getMinor()); } /** * Remove any unused entries in this repository * * @throws Exception */ void cleanUp() throws Exception { Workspace workspace = registry.getPlugin(Workspace.class); Set<Container> set = new HashSet<Container>(); for (Project project : workspace.getAllProjects()) { set.addAll(project.getBuildpath()); set.addAll(project.getRunbundles()); set.addAll(project.getRunpath()); set.addAll(project.getTestpath()); set.addAll(project.getBootclasspath()); set.addAll(project.getClasspath()); // // This should be replaced with project.getRunfw() // String s = project.getProperty(Constants.RUNFW); List<Container> bundles = project.getBundles(Strategy.HIGHEST, s, Constants.RUNFW); set.addAll(bundles); File base = project.getBase(); for (File sub : base.listFiles()) { if (sub.getName().endsWith(".bndrun")) { Project bndrun = new Project(workspace, base, sub); try { set.addAll(bndrun.getRunbundles()); set.addAll(bndrun.getRunpath()); set.addAll(bndrun.getTestpath()); set.addAll(bndrun.getBootclasspath()); set.addAll(bndrun.getClasspath()); } finally { bndrun.close(); } } } } Set<RevisionRef> refs = new HashSet<RevisionRef>(index.getRevisionRefs()); Set<RevisionRef> keep = new HashSet<RevisionRef>(); for (Container libOrRev : set) { for (Container c : libOrRev.getMembers()) { reporter.trace("Dependency " + c); if (!Verifier.isVersion(c.getVersion())) continue; 
RevisionRef ref = index.getRevisionRef(c.getBundleSymbolicName(), new Version(c.getVersion())); if (ref != null) refs.remove(ref); else { // missing! reporter.trace("Missing " + c.getBundleSymbolicName()); Coordinate coord = new Coordinate(c.getBundleSymbolicName()); Revision rev = library.getRevisionByCoordinate(coord); if (rev != null) { index.addRevision(new RevisionRef(rev)); } else System.out.printf("not found %s\n", c); } keep.add(ref); } } for (RevisionRef ref : refs) { index.delete(ref.bsn, Index.toVersion(ref)); } index.save(); } /** * Get a Resource Descriptor for a given bsn/version * * @param bsn * @param version * @return * @throws Exception */ public ResourceDescriptor getDescriptor(String bsn, Version version) throws Exception { RevisionRef revisionRef = index.getRevisionRef(bsn, version); if (revisionRef == null) return null; return createResourceDescriptor(revisionRef); } /** * Copy a string to the clipboard */ void toClipboard(String bsn, Version base) { Version nextMajor = new Version(base.getMajor() + 1, 0, 0); toClipboard(bsn + ";version='[" + base.getWithoutQualifier() + "," + nextMajor + ")'"); } void toClipboard(String s) { if (s == null) return; StringSelection stringSelection = new StringSelection(s); Clipboard clpbrd = Toolkit.getDefaultToolkit().getSystemClipboard(); clpbrd.setContents(stringSelection, null); } @Override public String toString() { byte[] digest; try { digest = getDigest(); } catch (Exception e) { throw new RuntimeException(e); } return "JpmRepository [writable=" + canWrite() + ", " + (getName() != null ? "name=" + getName() + ", " : "") + (getLocation() != null ? "location=" + getLocation() + ", " : "") + (digest != null ? "digest=" + Hex.toHexString(digest) : "") + "]"; } }
Remove trailing whitespace from JPM tooltips

When the tooltips for JPM resources are rendered in Eclipse (at least on Windows), the trailing newlines cause them to be 1 line taller than they should be. So trim them before returning them.

Also changed one place where string concatenation was used when a format string should have been.

Signed-off-by: Sean Bright <[email protected]>
biz.aQute.repository/src/aQute/bnd/jpm/Repository.java
Remove trailing whitespace from JPM tooltips
<ide><path>iz.aQute.repository/src/aQute/bnd/jpm/Repository.java <ide> f.format("Number of revs %s\n", index.getRevisionRefs().size()); <ide> f.format("Dirty %s\n", index.isDirty()); <ide> <del> return f.toString(); <add> return f.toString().trim(); <ide> } <ide> finally { <ide> f.close(); <ide> else <ide> sb.format("No description\n"); <ide> j.wrap((StringBuilder) sb.out()); <del> return sb.toString(); <add> return sb.toString().trim(); <ide> } <ide> finally { <ide> sb.close(); <ide> <ide> if (r.description != null) <ide> sb.format("%s\n\n", r.description.replaceAll("#\\s*", "")); <del> <add> <ide> sb.format("Size: %s\n", size(r.size, 0)); <ide> sb.format("SHA-1: %s\n", Hex.toHexString(r.revision)); <ide> sb.format("Age: %s\n", age(r.created)); <ide> <ide> Runnable update = getUpdateAction(p, r); <ide> if (update != null) { <del> sb.format(DOWN_ARROW + " This version can be updated to " + update); <add> sb.format("%c This version can be updated to %s\n", DOWN_ARROW, update); <ide> } <ide> } <ide> <ide> if (sources.isFile()) <ide> sb.format("Has sources: %s\n", sources.getAbsolutePath()); <ide> else <del> sb.format("No sources"); <add> sb.format("No sources\n"); <ide> <ide> j.wrap((StringBuilder) sb.out()); <del> return sb.toString(); <add> return sb.toString().trim(); <ide> } <ide> finally { <ide> sb.close();
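For context on the change recorded above, here is a minimal, self-contained sketch of the two patterns the commit message describes: trimming the Formatter output before returning it so tooltips do not end with a trailing newline, and passing values through a format string instead of concatenating them. This is not taken from Repository.java; the class name and sample values are invented for illustration.

import java.util.Formatter;

public class TooltipTrimSketch {

    // Builds a small tooltip the same way the repository code does: every
    // entry is written with a trailing "\n", so without trim() the final
    // string ends in a newline that some tooltip renderers display as an
    // extra blank line.
    static String tooltip(String bsn, String version) {
        Formatter f = new Formatter();
        try {
            f.format("Bundle: %s\n", bsn);
            // Format string rather than string concatenation:
            f.format("This version can be updated to %s\n", version);
            return f.toString().trim(); // drop the trailing newline
        } finally {
            f.close();
        }
    }

    public static void main(String[] args) {
        String t = tooltip("biz.aQute.repository", "2.3.0");
        System.out.println(t);
        System.out.println("ends with newline: " + t.endsWith("\n")); // false
    }
}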
Java
unlicense
fatal: invalid reference: FETCH_HEAD^
1d71853d79207bb8fd18add2428ad3dcc6b8d648
128
cwiggins/Scheduler
/* Scheduler.java * Operating Systems * Fall 2013 * Curtiss J Wiggins */ import java.util.LinkedList; import java.util.Scanner; import java.text.DecimalFormat; public class Scheduler { public static void arriving(LinkedList<Process> rqueue, LinkedList<Process> nrqueue, double CPUTime, double Atime){ int i = 0; if(CPUTime == Atime){ while(nrqueue.size() > 0){ if(nrqueue.get(i).Atime != Atime+1){ rqueue.add(nrqueue.remove()); }else{ break; } } } } public static void executing(Process process){ double j = process.Btime, runtime = 0; } public static void main (String[] args){ double Atime,Wtime,Btime,Tat,Twt,CPUTime; int NumOfProcesses = 0, i = 0; LinkedList<Process> rqueue = new LinkedList<Process> (); LinkedList<Process> nrqueue = new LinkedList<Process> (); Scanner sc = new Scanner (System.in); System.out.println("Please input number of Processes."); NumOfProcesses = sc.nextInt(); System.out.println(NumOfProcesses); /* * Loop for Adding Processes */ while (i < NumOfProcesses){ System.out.println("Please input Arrival Time:"); Atime = sc.nextDouble(); System.out.println("Please input Burst Time:"); Btime = sc.nextDouble(); Process task = new Process (Atime, Btime, "P"+(i+1)); if (task.Atime == 0){ rqueue.add(task); } else{ nrqueue.add(task); } i++; } System.out.println("In Ready Queue: " + rqueue.get(0).pid); /* * Printing out processes in not ready queue. These processes * technically haven't arrived yet. */ i = 0; System.out.print("In Not Ready Queue:"); while (i < nrqueue.size()){ System.out.println(nrqueue.get(i).pid); i++; } /* * Time to start executing some processes. */ CPUTime = 0.0; i = 1; double j = rqueue.get(i-1).Btime; Process executing = rqueue.remove(i-1); boolean notdone = true; while (notdone){ if (CPUTime != j){ if(CPUTime == i){ arriving(rqueue, nrqueue, CPUTime, i); i++; } CPUTime = round.round(CPUTime+.1); } else{ notdone = false; } } System.out.println("Process Done."); System.out.println("In Ready Queue Now:"); for(i=0;i<rqueue.size();i++){ System.out.println(rqueue.get(i).pid); } } } class Process { public double Atime, Btime; public String pid; public Process (double Atime, double Btime, String pid) { this.Atime=Atime; this.Btime=Btime; this.pid = pid; } } class round { public static double round(double val){ DecimalFormat df = new DecimalFormat ("#.#"); val = Double.valueOf(df.format(val)); return val; } }
Scheduler.java
Clock simulation and simulation of first process executing working
Scheduler.java
Clock simulation and simulation of first process executing working
<ide><path>cheduler.java <add>/* Scheduler.java <add> * Operating Systems <add> * Fall 2013 <add> * Curtiss J Wiggins <add> */ <add> <add>import java.util.LinkedList; <add>import java.util.Scanner; <add>import java.text.DecimalFormat; <add> <add> <add>public class Scheduler { <add> <add> public static void arriving(LinkedList<Process> rqueue, <add> LinkedList<Process> nrqueue, double CPUTime, double Atime){ <add> <add> int i = 0; <add> if(CPUTime == Atime){ <add> while(nrqueue.size() > 0){ <add> if(nrqueue.get(i).Atime != Atime+1){ <add> rqueue.add(nrqueue.remove()); <add> <add> }else{ <add> break; <add> } <add> } <add> } <add> } <add> <add> public static void executing(Process process){ <add> double j = process.Btime, runtime = 0; <add> } <add> public static void main (String[] args){ <add> double Atime,Wtime,Btime,Tat,Twt,CPUTime; <add> int NumOfProcesses = 0, i = 0; <add> LinkedList<Process> rqueue = new LinkedList<Process> (); <add> LinkedList<Process> nrqueue = new LinkedList<Process> (); <add> <add> Scanner sc = new Scanner (System.in); <add> System.out.println("Please input number of Processes."); <add> NumOfProcesses = sc.nextInt(); <add> System.out.println(NumOfProcesses); <add> <add> /* <add> * Loop for Adding Processes <add> */ <add> while (i < NumOfProcesses){ <add> System.out.println("Please input Arrival Time:"); <add> Atime = sc.nextDouble(); <add> System.out.println("Please input Burst Time:"); <add> Btime = sc.nextDouble(); <add> Process task = new Process (Atime, Btime, "P"+(i+1)); <add> if (task.Atime == 0){ <add> rqueue.add(task); <add> } <add> else{ <add> nrqueue.add(task); <add> } <add> i++; <add> } <add> System.out.println("In Ready Queue: " + rqueue.get(0).pid); <add> <add> /* <add> * Printing out processes in not ready queue. These processes <add> * technically haven't arrived yet. <add> */ <add> i = 0; <add> System.out.print("In Not Ready Queue:"); <add> while (i < nrqueue.size()){ <add> System.out.println(nrqueue.get(i).pid); <add> i++; <add> } <add> <add> /* <add> * Time to start executing some processes. <add> */ <add> CPUTime = 0.0; <add> i = 1; <add> double j = rqueue.get(i-1).Btime; <add> Process executing = rqueue.remove(i-1); <add> boolean notdone = true; <add> while (notdone){ <add> if (CPUTime != j){ <add> if(CPUTime == i){ <add> arriving(rqueue, nrqueue, CPUTime, i); <add> i++; <add> } <add> CPUTime = round.round(CPUTime+.1); <add> } <add> else{ <add> notdone = false; <add> } <add> } <add> System.out.println("Process Done."); <add> System.out.println("In Ready Queue Now:"); <add> for(i=0;i<rqueue.size();i++){ <add> System.out.println(rqueue.get(i).pid); <add> } <add> } <add>} <add> <add>class Process { <add> public double Atime, Btime; <add> public String pid; <add> public Process (double Atime, double Btime, String pid) { <add> this.Atime=Atime; <add> this.Btime=Btime; <add> this.pid = pid; <add> } <add>} <add>class round { <add>public static double round(double val){ <add> DecimalFormat df = new DecimalFormat ("#.#"); <add> val = Double.valueOf(df.format(val)); <add> return val; <add>} <add>}
JavaScript
apache-2.0
766af7ed5199373ca216a573f81c32395b25cab0
0
serverboards/serverboards,serverboards/serverboards,serverboards/serverboards,serverboards/serverboards,serverboards/serverboards
import React from 'react' import rpc from 'app/rpc' import {MarkdownPreview} from 'react-marked-markdown' import {to_list} from 'app/utils' import Flash from 'app/flash' const RichDescription=React.createClass({ process_description(vars){ vars = vars || [] let text = this.props.value vars.map( (kv) => { text=text.replace(`{{${kv[0]}}}`, kv[1]) }) return text }, getInitialState(){ return { content: this.process_description(), extraClass: "loading" } }, componentDidMount(){ Promise.all( (this.props.vars || []).map( (v) => { // For each var run plugin, get value, stop plugin let p=new Promise((resolve, reject) => { rpc.call("plugin.start", [v.command]) .then((uuid) => { rpc.call(`${uuid}.${v.call}`, []) .then((content) => resolve([v.id, content])) .catch((e) => reject(e)) .then(() => rpc.call("plugin.stop", [uuid])) .catch((e) => true) // no prob if no stop }) .catch((e) => reject(e)) }) return p })).then( (vars) => { // Then set it into the state, update content this.setState({content: this.process_description(vars), extraClass: ""}) }).catch((e) => { console.error(e) this.setState({content: "Error loading dynamic data. Contact plugin author. [Error #100]", extraClass: "error"}) Flash.error("Error loading dynamic data. Contact plugin author.",{error: 100}) }) }, render(){ const props=this.props const state=this.state return ( <div className={`${props.className} ${state.extraClass || ""}`}><MarkdownPreview value={state.content}/></div> ) } }) let GenericField=React.createClass({ getInitialState(){ return { items: [] } }, handleChange: function(ev){ this.props.setValue(this.props.name, ev.target.value) }, componentDidMount(){ // Some may need post initialization switch (this.props.type){ case 'select': $(this.refs.select).dropdown() break; case 'service': let self=this rpc.call("service.list", {traits: (self.props.traits || [])}).then( (services) => { console.log("Got services: %o", services) const results=services.map( (s) => ({ //name: s.name, value: s.uuid, name: s.name, description: s.fields.filter( (p) => p.card ).map( (p) => p.value ).join(',') })) self.setState({items: results}) $(self.refs.select).dropdown({ onChange(value){ self.props.setValue(self.props.name, value) } }) }) break; default: ;; break; } }, render(){ let props=this.props switch (props.type){ case undefined: case '': case 'text': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <input type="text" name={props.name} placeholder={props.placeholder || props.description} defaultValue={props.value} onChange={this.handleChange}/> </div> ) case 'url': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <input type="url" name={props.name} placeholder={props.placeholder || props.description} defaultValue={props.value} onChange={this.handleChange}/> </div> ) case 'textarea': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <textarea name={props.name} placeholder={props.placeholder || props.description} defaultValue={props.value} onChange={this.handleChange}/> </div> ) case 'password': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <input type="password" name={props.name} placeholder={props.placeholder || props.description} defaultValue={props.value} 
onChange={this.handleChange}/> </div> ) case 'description': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> </div> ) case 'hidden': return ( <input type="hidden" disabled={true} name={props.name} value={props.value}/> ) case 'select': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <select ref="select" name={props.name} defaultValue={props.value} className={`ui fluid ${props.search ? "search" : ""} dropdown`} onChange={this.handleChange}> {props.options.map((o) => ( <option value={o.value}>{o.label}</option> ))} </select> </div> ) case 'service': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <div ref="select" className={`ui fluid ${props.search ? "search" : ""} selection dropdown`}> <input type="hidden" name={props.name} defaultValue={props.value} onChange={this.handleChange}/> <i className="dropdown icon"></i> <div className="default text" style={{display:"block"}}>{props.value || props.placeholder}</div> <div className="menu"> {(this.state.items || []).map( (ac) => ( <div key={ac.id} className="item" data-value={ac.value}>{ac.name}<span className="ui meta" style={{float:"right"}}>{ac.description}</span></div> ))} </div> </div> </div> ) default: return ( <div className="ui message error" style={{display: "block"}}>Unknown field type "{props.type}"</div> ) } } }) let GenericForm=React.createClass({ getInitialState(props){ props = props || this.props let state={}; (props.fields || []).map((f) => state[f.name]=f.value || '') if (props.data){ Object.keys(props.data).map( (k) => { state[k]=props.data[k] }) } return state }, setValue : function(k, v){ let update = {[k]: v } this.setState( update ) let nstate=Object.assign({}, this.state, update ) // looks like react delays state change, I need it now //console.log(nstate, this.props) this.props.updateForm && this.props.updateForm(nstate) }, componentWillReceiveProps(newprops){ if (newprops.fields != this.props.fields || newprops.data != this.props.data){ this.setState( this.getInitialState(newprops) ) } }, componentDidMount(){ let fields = {}; (this.props.fields || []).map((f) => { if (f.validation) fields[f.name]=f.validation }) $(this.refs.form).form({ on: 'blur', fields }).on('submit', function(ev){ ev.preventDefault() }) this.props.updateForm && this.props.updateForm(this.state) }, render(){ const props=this.props return ( <form ref="form" className={`ui form ${props.className || ""}`} onSubmit={(ev) => { ev.preventDefault(); props.onSubmit && props.onSubmit(ev) }}> {(props.fields || []).map((f) => ( <GenericField key={f.name} setValue={this.setValue} value={this.state[f.name]} {...f}/> ))} {props.children} </form> ) } }) export default GenericForm
frontend/app/js/components/genericform.js
import React from 'react' import rpc from 'app/rpc' import {MarkdownPreview} from 'react-marked-markdown' import {to_list} from 'app/utils' import Flash from 'app/flash' const RichDescription=React.createClass({ process_description(vars){ vars = vars || [] let text = this.props.value vars.map( (kv) => { text=text.replace(`{{${kv[0]}}}`, kv[1]) }) return text }, getInitialState(){ return { content: this.process_description(), extraClass: "loading" } }, componentDidMount(){ Promise.all( (this.props.vars || []).map( (v) => { // For each var run plugin, get value, stop plugin let p=new Promise((resolve, reject) => { rpc.call("plugin.start", [v.command]) .then((uuid) => { rpc.call(`${uuid}.${v.call}`, []) .then((content) => resolve([v.id, content])) .catch((e) => reject(e)) .then(() => rpc.call("plugin.stop", [uuid])) .catch((e) => true) // no prob if no stop }) .catch((e) => reject(e)) }) return p })).then( (vars) => { // Then set it into the state, update content this.setState({content: this.process_description(vars), extraClass: ""}) }).catch((e) => { console.error(e) this.setState({content: "Error loading dynamic data. Contact plugin author. [Error #100]", extraClass: "error"}) Flash.error("Error loading dynamic data. Contact plugin author.",{error: 100}) }) }, render(){ const props=this.props const state=this.state return ( <div className={`${props.className} ${state.extraClass || ""}`}><MarkdownPreview value={state.content}/></div> ) } }) let GenericField=React.createClass({ getInitialState(){ return { items: [] } }, handleChange: function(ev){ this.props.setValue(this.props.name, ev.target.value) }, componentDidMount(){ // Some may need post initialization switch (this.props.type){ case 'select': $(this.refs.select).dropdown() break; case 'service': let self=this rpc.call("service.list", {traits: (self.props.traits || [])}).then( (services) => { console.log("Got services: %o", services) const results=services.map( (s) => ({ //name: s.name, value: s.uuid, name: s.name, description: s.fields.filter( (p) => p.card ).map( (p) => p.value ).join(',') })) self.setState({items: results}) $(self.refs.select).dropdown({ onChange(value){ self.props.setValue(self.props.name, value) } }) }) break; default: ;; break; } }, render(){ let props=this.props switch (props.type){ case undefined: case '': case 'text': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <input type="text" name={props.name} placeholder={props.placeholder || props.description} defaultValue={props.value} onChange={this.handleChange}/> </div> ) case 'url': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <input type="url" name={props.name} placeholder={props.placeholder || props.description} defaultValue={props.value} onChange={this.handleChange}/> </div> ) case 'textarea': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <textarea name={props.name} placeholder={props.placeholder || props.description} defaultValue={props.value} onChange={this.handleChange}/> </div> ) case 'password': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <input type="password" name={props.name} placeholder={props.placeholder || props.description} defaultValue={props.value} 
onChange={this.handleChange}/> </div> ) case 'description': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> </div> ) case 'hidden': return ( <input type="hidden" disabled={true} name={props.name} value={props.value}/> ) case 'select': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <select ref="select" name={props.name} defaultValue={props.value} className={`ui fluid ${props.search ? "search" : ""} dropdown`} onChange={this.handleChange}> {props.options.map((o) => ( <option value={o.value}>{o.label}</option> ))} </select> </div> ) case 'service': return ( <div className="field"> <label>{props.label}</label> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <div ref="select" className={`ui fluid ${props.search ? "search" : ""} selection dropdown`}> <input type="hidden" name={props.name} defaultValue={props.value} onChange={this.handleChange}/> <i className="dropdown icon"></i> <div className="default text" style={{display:"block"}}>{props.value || props.placeholder}</div> <div className="menu"> {(this.state.items || []).map( (ac) => ( <div key={ac.id} className="item" data-value={ac.value}>{ac.name}<span className="ui meta" style={{float:"right"}}>{ac.description}</span></div> ))} </div> </div> </div> ) default: return ( <div className="ui message error" style={{display: "block"}}>Unknown field type "{props.type}"</div> ) } } }) let GenericForm=React.createClass({ getInitialState(props){ props = props || this.props let state={}; (props.fields || []).map((f) => state[f.name]=f.value || '') if (props.data){ Object.keys(props.data).map( (k) => { state[k]=props.data[k] }) } return state }, setValue : function(k, v){ let update = {[k]: v } this.setState( update ) let nstate=Object.assign({}, this.state, update ) // looks like react delays state change, I need it now //console.log(nstate, this.props) this.props.updateForm && this.props.updateForm(nstate) }, componentWillReceiveProps(newprops){ if (newprops.fields != this.props.fields || newprops.data != this.props.data){ this.setState( this.getInitialState(newprops) ) } }, componentDidMount(){ let fields = {}; (this.props.fields || []).map((f) => { if (f.validation) fields[f.name]=f.validation }) $(this.refs.form).form({ on: 'blur', fields }).on('submit', function(ev){ ev.preventDefault() }) this.props.updateForm && this.props.updateForm(this.state) }, render(){ const props=this.props return ( <form ref="form" className={`ui form ${props.className || ""}`} onSubmit={(ev) => { ev.preventDefault(); props.onSubmit && props.onSubmit(ev) }}> {(props.fields || []).map((f) => ( <GenericField key={f.name} setValue={this.setValue} value={this.state[f.name]} {...f}/> ))} {props.children} </form> ) } }) export default GenericForm
Show description in all GenericForms
frontend/app/js/components/genericform.js
Show description in all GenericForms
<ide><path>rontend/app/js/components/genericform.js <ide> case 'text': <ide> return ( <ide> <div className="field"> <del> <label>{props.label}</label> <del> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <del> <input type="text" <del> name={props.name} <del> placeholder={props.placeholder || props.description} <del> defaultValue={props.value} <del> onChange={this.handleChange}/> <add> <label>{props.label}</label> <add> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <add> <input type="text" <add> name={props.name} <add> placeholder={props.placeholder || props.description} <add> defaultValue={props.value} <add> onChange={this.handleChange}/> <ide> </div> <ide> ) <ide> case 'url': <ide> return ( <ide> <div className="field"> <del> <label>{props.label}</label> <del> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <del> <input type="url" <del> name={props.name} <del> placeholder={props.placeholder || props.description} <del> defaultValue={props.value} <del> onChange={this.handleChange}/> <add> <label>{props.label}</label> <add> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <add> <input type="url" <add> name={props.name} <add> placeholder={props.placeholder || props.description} <add> defaultValue={props.value} <add> onChange={this.handleChange}/> <ide> </div> <ide> ) <ide> case 'textarea': <ide> return ( <ide> <div className="field"> <del> <label>{props.label}</label> <del> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <del> <textarea <del> name={props.name} <del> placeholder={props.placeholder || props.description} <del> defaultValue={props.value} <del> onChange={this.handleChange}/> <add> <label>{props.label}</label> <add> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <add> <textarea <add> name={props.name} <add> placeholder={props.placeholder || props.description} <add> defaultValue={props.value} <add> onChange={this.handleChange}/> <ide> </div> <ide> ) <ide> case 'password': <ide> return ( <ide> <div className="field"> <del> <label>{props.label}</label> <del> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <del> <input type="password" <del> name={props.name} <del> placeholder={props.placeholder || props.description} <del> defaultValue={props.value} <del> onChange={this.handleChange}/> <add> <label>{props.label}</label> <add> <RichDescription className="ui meta" value={props.description} vars={props.vars}/> <add> <input type="password" <add> name={props.name} <add> placeholder={props.placeholder || props.description} <add> defaultValue={props.value} <add> onChange={this.handleChange}/> <ide> </div> <ide> ) <ide> case 'description':
Java
bsd-3-clause
3534ea51d875aabbe0fb15e0dda56bb3993d1b64
0
orc-lang/orc,orc-lang/orc,orc-lang/orc,orc-lang/orc,orc-lang/orc
// // OrcLabelProvider.java -- Java class OrcLabelProvider // Project OrcEclipse // // $Id$ // // Created by jthywiss on Aug 5, 2009. // // Copyright (c) 2010 The University of Texas at Austin. All rights reserved. // // Use and redistribution of this file is governed by the license terms in // the LICENSE file found in the project's top-level directory and also found at // URL: http://orc.csres.utexas.edu/license.shtml . // package edu.utexas.cs.orc.orceclipse.edit; import java.util.HashSet; import java.util.Set; import orc.ast.AST; import orc.ast.OrcSyntaxConvertable; import orc.ast.ext.ClassImport; import orc.ast.ext.Def; import orc.ast.ext.DefCapsule; import orc.ast.ext.DefSig; import orc.ast.ext.Include; import orc.ast.ext.Pattern; import orc.ast.ext.SiteDeclaration; import orc.ast.ext.Type; import orc.ast.ext.TypeDeclaration; import orc.ast.ext.Val; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IMarker; import org.eclipse.core.resources.IResource; import org.eclipse.imp.editor.ModelTreeNode; import org.eclipse.imp.services.ILabelProvider; import org.eclipse.imp.utils.MarkerUtils; import org.eclipse.jface.resource.ImageRegistry; import org.eclipse.jface.viewers.DecorationOverlayIcon; import org.eclipse.jface.viewers.IDecoration; import org.eclipse.jface.viewers.ILabelProviderListener; import org.eclipse.swt.graphics.Image; import scala.collection.JavaConversions; import edu.utexas.cs.orc.orceclipse.Activator; import edu.utexas.cs.orc.orceclipse.OrcResources; /** * Label provider for the Orc Language. * <p> * A label provider maps an element of a tree model to * an optional image and optional text string used to display * the element in the user interface. * * @see org.eclipse.imp.editor.ModelTreeNode */ public class OrcLabelProvider implements ILabelProvider { private final Set<ILabelProviderListener> fListeners = new HashSet<ILabelProviderListener>(); private static ImageRegistry orcImageRegistry = Activator.getInstance().getImageRegistry(); private static Image ORC_FILE_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_FILE_OBJ); private static Image ORC_FILE_W_ERROR = new DecorationOverlayIcon(ORC_FILE_OBJ_IMAGE, orcImageRegistry.getDescriptor(OrcResources.ERROR_OVR), IDecoration.BOTTOM_LEFT).createImage(); private static Image ORC_FILE_W_WARNING = new DecorationOverlayIcon(ORC_FILE_OBJ_IMAGE, orcImageRegistry.getDescriptor(OrcResources.WARNING_OVR), IDecoration.BOTTOM_LEFT).createImage(); private static Image ORC_INCLUDE_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_INCLUDE_OBJ); private static Image ORC_INCLUDE_W_ERROR = new DecorationOverlayIcon(ORC_INCLUDE_OBJ_IMAGE, orcImageRegistry.getDescriptor(OrcResources.ERROR_OVR), IDecoration.BOTTOM_LEFT).createImage(); private static Image ORC_INCLUDE_W_WARNING = new DecorationOverlayIcon(ORC_INCLUDE_OBJ_IMAGE, orcImageRegistry.getDescriptor(OrcResources.WARNING_OVR), IDecoration.BOTTOM_LEFT).createImage(); private static Image ORC_GENERIC_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_GENERIC_OBJ); private static Image ORC_DEF_TYPE_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_DEF_TYPE_OBJ); private static Image ORC_DEF_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_DEF_OBJ); private static Image ORC_SITE_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_SITE_OBJ); private static Image ORC_CLASS_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_CLASS_OBJ); private static Image ORC_VARIABLE_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_VARIABLE_OBJ); private static Image ORC_TYPE_OBJ_IMAGE 
= orcImageRegistry.get(OrcResources.ORC_TYPE_OBJ); /* (non-Javadoc) * @see org.eclipse.jface.viewers.ILabelProvider#getImage(java.lang.Object) */ public Image getImage(final Object element) { if (element instanceof IFile) { final IFile file = (IFile) element; Image elemImage = null; final int sev = MarkerUtils.getMaxProblemMarkerSeverity(file, IResource.DEPTH_ONE); if (!file.getName().toLowerCase().endsWith(".inc")) { //$NON-NLS-1$ // Assume Orc file switch (sev) { case IMarker.SEVERITY_ERROR: elemImage = ORC_FILE_W_ERROR; break; case IMarker.SEVERITY_WARNING: elemImage = ORC_FILE_W_WARNING; break; default: elemImage = ORC_FILE_OBJ_IMAGE; break; } } else { // Include file switch (sev) { case IMarker.SEVERITY_ERROR: elemImage = ORC_INCLUDE_W_ERROR; break; case IMarker.SEVERITY_WARNING: elemImage = ORC_INCLUDE_W_WARNING; break; default: elemImage = ORC_INCLUDE_OBJ_IMAGE; break; } } return elemImage; } final AST n = element instanceof ModelTreeNode ? (AST) ((ModelTreeNode) element).getASTNode() : (AST) element; return getImageFor(n); } /** * @param n AST node to retrieve an image * @return Image representing the type of the given AST node */ public static Image getImageFor(final AST n) { if (n instanceof Include) { return ORC_INCLUDE_OBJ_IMAGE; } if (n instanceof DefSig) { return ORC_DEF_TYPE_OBJ_IMAGE; } if (n instanceof Def || n instanceof DefCapsule) { return ORC_DEF_OBJ_IMAGE; } if (n instanceof SiteDeclaration) { return ORC_SITE_OBJ_IMAGE; } if (n instanceof ClassImport) { return ORC_CLASS_OBJ_IMAGE; } if (n instanceof Val) { return ORC_VARIABLE_OBJ_IMAGE; } if (n instanceof TypeDeclaration) { return ORC_TYPE_OBJ_IMAGE; } return ORC_GENERIC_OBJ_IMAGE; } /* (non-Javadoc) * @see org.eclipse.jface.viewers.ILabelProvider#getText(java.lang.Object) */ public String getText(final Object element) { final AST n = element instanceof ModelTreeNode ? (AST) ((ModelTreeNode) element).getASTNode() : (AST) element; return getLabelFor(n); } /** * @param n AST node to label * @return String representing a label of the given AST node */ public static String getLabelFor(final AST n) { if (n instanceof Include) { final Include idecl = (Include) n; return idecl.origin(); } if (n instanceof Def) { final Def dmc = (Def) n; return sigToString(dmc); } if (n instanceof DefCapsule) { final DefCapsule dmc = (DefCapsule) n; return sigToString(dmc); } if (n instanceof DefSig) { final DefSig dmt = (DefSig) n; return sigToString(dmt); } if (n instanceof SiteDeclaration) { return ((SiteDeclaration) n).name(); } if (n instanceof ClassImport) { return ((ClassImport) n).name(); } if (n instanceof Val) { return ((Val) n).p().toOrcSyntax(); } if (n instanceof TypeDeclaration) { return ((TypeDeclaration) n).name(); } // If we get here, someone forgot to add a case above.... 
return "<" + n.getClass().getSimpleName() + ">"; //$NON-NLS-1$ //$NON-NLS-2$ } /* (non-Javadoc) * @see org.eclipse.jface.viewers.IBaseLabelProvider#addListener(org.eclipse.jface.viewers.ILabelProviderListener) */ public void addListener(final ILabelProviderListener listener) { fListeners.add(listener); } /* (non-Javadoc) * @see org.eclipse.jface.viewers.IBaseLabelProvider#dispose() */ public void dispose() { } /* (non-Javadoc) * @see org.eclipse.jface.viewers.IBaseLabelProvider#isLabelProperty(java.lang.Object, java.lang.String) */ public boolean isLabelProperty(final Object element, final String property) { return false; } /* (non-Javadoc) * @see org.eclipse.jface.viewers.IBaseLabelProvider#removeListener(org.eclipse.jface.viewers.ILabelProviderListener) */ public void removeListener(final ILabelProviderListener listener) { fListeners.remove(listener); } private static String sigToString(final Def d) { final StringBuilder s = new StringBuilder(); s.append(d.name()); for (final scala.collection.immutable.List<Pattern> ps : JavaConversions.asIterable(d.formals())) { s.append('('); if (ps != null) { s.append(listMkString(JavaConversions.asIterable(ps), ", ")); //$NON-NLS-1$ } s.append(')'); } return s.toString(); } private static String sigToString(final DefCapsule d) { final StringBuilder s = new StringBuilder(); s.append(d.name()); for (final scala.collection.immutable.List<Pattern> ps : JavaConversions.asIterable(d.formals())) { s.append('('); if (ps != null) { s.append(listMkString(JavaConversions.asIterable(ps), ", ")); //$NON-NLS-1$ } s.append(')'); } return s.toString(); } private static String sigToString(final DefSig d) { final StringBuilder s = new StringBuilder(); s.append(d.name()); if (d.typeformals() != null && d.typeformals().size() > 0) { s.append('['); s.append(listMkString(JavaConversions.asIterable(d.typeformals()), ", ")); //$NON-NLS-1$ s.append(']'); } for (final scala.collection.immutable.List<Type> argTypes : JavaConversions.asIterable(d.argtypes())) { s.append('('); if (argTypes != null) { s.append(listMkString(JavaConversions.asIterable(argTypes), ", ")); //$NON-NLS-1$ } s.append(')'); } s.append(" :: "); //$NON-NLS-1$ s.append(d.returntype().toOrcSyntax()); return s.toString(); } private static String listMkString(final Iterable<?> theList, final String sep) { final StringBuilder sb = new StringBuilder(); for (final Object o : theList) { if (o instanceof OrcSyntaxConvertable) { sb.append(((OrcSyntaxConvertable) o).toOrcSyntax()); } else { sb.append(o.toString()); } sb.append(sep); } if (sb.length() == 0) { return ""; //$NON-NLS-1$ } else { return sb.substring(0, sb.length() - sep.length()); } } }
OrcEclipse/src/edu/utexas/cs/orc/orceclipse/edit/OrcLabelProvider.java
// // OrcLabelProvider.java -- Java class OrcLabelProvider // Project OrcEclipse // // $Id$ // // Created by jthywiss on Aug 5, 2009. // // Copyright (c) 2010 The University of Texas at Austin. All rights reserved. // // Use and redistribution of this file is governed by the license terms in // the LICENSE file found in the project's top-level directory and also found at // URL: http://orc.csres.utexas.edu/license.shtml . // package edu.utexas.cs.orc.orceclipse.edit; import java.util.HashSet; import java.util.Set; import orc.ast.AST; import orc.ast.ext.ClassImport; import orc.ast.ext.Def; import orc.ast.ext.DefCapsule; import orc.ast.ext.DefSig; import orc.ast.ext.Include; import orc.ast.ext.Pattern; import orc.ast.ext.SiteDeclaration; import orc.ast.ext.Type; import orc.ast.ext.TypeDeclaration; import orc.ast.ext.Val; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IMarker; import org.eclipse.core.resources.IResource; import org.eclipse.imp.editor.ModelTreeNode; import org.eclipse.imp.services.ILabelProvider; import org.eclipse.imp.utils.MarkerUtils; import org.eclipse.jface.resource.ImageRegistry; import org.eclipse.jface.viewers.DecorationOverlayIcon; import org.eclipse.jface.viewers.IDecoration; import org.eclipse.jface.viewers.ILabelProviderListener; import org.eclipse.swt.graphics.Image; import scala.collection.JavaConversions; import edu.utexas.cs.orc.orceclipse.Activator; import edu.utexas.cs.orc.orceclipse.OrcResources; /** * Label provider for the Orc Language. * <p> * A label provider maps an element of a tree model to * an optional image and optional text string used to display * the element in the user interface. * * @see org.eclipse.imp.editor.ModelTreeNode */ public class OrcLabelProvider implements ILabelProvider { private final Set<ILabelProviderListener> fListeners = new HashSet<ILabelProviderListener>(); private static ImageRegistry orcImageRegistry = Activator.getInstance().getImageRegistry(); private static Image ORC_FILE_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_FILE_OBJ); private static Image ORC_FILE_W_ERROR = new DecorationOverlayIcon(ORC_FILE_OBJ_IMAGE, orcImageRegistry.getDescriptor(OrcResources.ERROR_OVR), IDecoration.BOTTOM_LEFT).createImage(); private static Image ORC_FILE_W_WARNING = new DecorationOverlayIcon(ORC_FILE_OBJ_IMAGE, orcImageRegistry.getDescriptor(OrcResources.WARNING_OVR), IDecoration.BOTTOM_LEFT).createImage(); private static Image ORC_INCLUDE_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_INCLUDE_OBJ); private static Image ORC_INCLUDE_W_ERROR = new DecorationOverlayIcon(ORC_INCLUDE_OBJ_IMAGE, orcImageRegistry.getDescriptor(OrcResources.ERROR_OVR), IDecoration.BOTTOM_LEFT).createImage(); private static Image ORC_INCLUDE_W_WARNING = new DecorationOverlayIcon(ORC_INCLUDE_OBJ_IMAGE, orcImageRegistry.getDescriptor(OrcResources.WARNING_OVR), IDecoration.BOTTOM_LEFT).createImage(); private static Image ORC_GENERIC_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_GENERIC_OBJ); private static Image ORC_DEF_TYPE_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_DEF_TYPE_OBJ); private static Image ORC_DEF_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_DEF_OBJ); private static Image ORC_SITE_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_SITE_OBJ); private static Image ORC_CLASS_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_CLASS_OBJ); private static Image ORC_VARIABLE_OBJ_IMAGE = orcImageRegistry.get(OrcResources.ORC_VARIABLE_OBJ); private static Image ORC_TYPE_OBJ_IMAGE = 
orcImageRegistry.get(OrcResources.ORC_TYPE_OBJ); /* (non-Javadoc) * @see org.eclipse.jface.viewers.ILabelProvider#getImage(java.lang.Object) */ public Image getImage(final Object element) { if (element instanceof IFile) { final IFile file = (IFile) element; Image elemImage = null; final int sev = MarkerUtils.getMaxProblemMarkerSeverity(file, IResource.DEPTH_ONE); if (!file.getName().toLowerCase().endsWith(".inc")) { //$NON-NLS-1$ // Assume Orc file switch (sev) { case IMarker.SEVERITY_ERROR: elemImage = ORC_FILE_W_ERROR; break; case IMarker.SEVERITY_WARNING: elemImage = ORC_FILE_W_WARNING; break; default: elemImage = ORC_FILE_OBJ_IMAGE; break; } } else { // Include file switch (sev) { case IMarker.SEVERITY_ERROR: elemImage = ORC_INCLUDE_W_ERROR; break; case IMarker.SEVERITY_WARNING: elemImage = ORC_INCLUDE_W_WARNING; break; default: elemImage = ORC_INCLUDE_OBJ_IMAGE; break; } } return elemImage; } final AST n = element instanceof ModelTreeNode ? (AST) ((ModelTreeNode) element).getASTNode() : (AST) element; return getImageFor(n); } /** * @param n AST node to retrieve an image * @return Image representing the type of the given AST node */ public static Image getImageFor(final AST n) { if (n instanceof Include) { return ORC_INCLUDE_OBJ_IMAGE; } if (n instanceof DefSig) { return ORC_DEF_TYPE_OBJ_IMAGE; } if (n instanceof Def || n instanceof DefCapsule) { return ORC_DEF_OBJ_IMAGE; } if (n instanceof SiteDeclaration) { return ORC_SITE_OBJ_IMAGE; } if (n instanceof ClassImport) { return ORC_CLASS_OBJ_IMAGE; } if (n instanceof Val) { return ORC_VARIABLE_OBJ_IMAGE; } if (n instanceof TypeDeclaration) { return ORC_TYPE_OBJ_IMAGE; } return ORC_GENERIC_OBJ_IMAGE; } /* (non-Javadoc) * @see org.eclipse.jface.viewers.ILabelProvider#getText(java.lang.Object) */ public String getText(final Object element) { final AST n = element instanceof ModelTreeNode ? (AST) ((ModelTreeNode) element).getASTNode() : (AST) element; return getLabelFor(n); } /** * @param n AST node to label * @return String representing a label of the given AST node */ public static String getLabelFor(final AST n) { if (n instanceof Include) { final Include idecl = (Include) n; return idecl.origin(); } if (n instanceof Def) { final Def dmc = (Def) n; return sigToString(dmc); } if (n instanceof DefCapsule) { final DefCapsule dmc = (DefCapsule) n; return sigToString(dmc); } if (n instanceof DefSig) { final DefSig dmt = (DefSig) n; return sigToString(dmt); } if (n instanceof SiteDeclaration) { return ((SiteDeclaration) n).name(); } if (n instanceof ClassImport) { return ((ClassImport) n).name(); } if (n instanceof Val) { return ((Val) n).p().toOrcSyntax(); } if (n instanceof TypeDeclaration) { return ((TypeDeclaration) n).name(); } // If we get here, someone forgot to add a case above.... 
return "<" + n.getClass().getSimpleName() + ">"; //$NON-NLS-1$ //$NON-NLS-2$ } /* (non-Javadoc) * @see org.eclipse.jface.viewers.IBaseLabelProvider#addListener(org.eclipse.jface.viewers.ILabelProviderListener) */ public void addListener(final ILabelProviderListener listener) { fListeners.add(listener); } /* (non-Javadoc) * @see org.eclipse.jface.viewers.IBaseLabelProvider#dispose() */ public void dispose() { } /* (non-Javadoc) * @see org.eclipse.jface.viewers.IBaseLabelProvider#isLabelProperty(java.lang.Object, java.lang.String) */ public boolean isLabelProperty(final Object element, final String property) { return false; } /* (non-Javadoc) * @see org.eclipse.jface.viewers.IBaseLabelProvider#removeListener(org.eclipse.jface.viewers.ILabelProviderListener) */ public void removeListener(final ILabelProviderListener listener) { fListeners.remove(listener); } private static String sigToString(final Def d) { final StringBuilder s = new StringBuilder(); s.append(d.name()); for (final scala.collection.immutable.List<Pattern> ps : JavaConversions.asIterable(d.formals())) { s.append('('); if (ps != null) { s.append(listMkOrcString(JavaConversions.asIterable(ps), ",")); //$NON-NLS-1$ } s.append(')'); } return s.toString(); } private static String sigToString(final DefCapsule d) { final StringBuilder s = new StringBuilder(); s.append(d.name()); for (final scala.collection.immutable.List<Pattern> ps : JavaConversions.asIterable(d.formals())) { s.append('('); if (ps != null) { s.append(listMkOrcString(JavaConversions.asIterable(ps), ",")); //$NON-NLS-1$ } s.append(')'); } return s.toString(); } private static String sigToString(final DefSig d) { final StringBuilder s = new StringBuilder(); s.append(d.name()); if (d.typeformals() != null) { s.append('['); s.append(listMkString(JavaConversions.asIterable(d.typeformals()), ", ")); //$NON-NLS-1$ s.append(']'); } for (final scala.collection.immutable.List<Type> argTypes : JavaConversions.asIterable(d.argtypes())) { s.append('('); if (argTypes != null) { s.append(listMkString(JavaConversions.asIterable(argTypes), ",")); //$NON-NLS-1$ } s.append(')'); } s.append(" :: "); //$NON-NLS-1$ s.append(d.returntype()); return s.toString(); } private static String listMkString(final Iterable<?> theList, final String sep) { final StringBuilder sb = new StringBuilder(); for (final Object o : theList) { sb.append(o.toString()); sb.append(sep); } if (sb.length() == 0) { return ""; //$NON-NLS-1$ } else { return sb.substring(0, sb.length() - sep.length()); } } private static String listMkOrcString(final Iterable<Pattern> theList, final String sep) { //TODO: As toOrcSyntax() is available on more general types than, Pattern update types in here final StringBuilder sb = new StringBuilder(); for (final Pattern p : theList) { sb.append(p.toOrcSyntax()); sb.append(sep); } if (sb.length() == 0) { return ""; //$NON-NLS-1$ } else { return sb.substring(0, sb.length() - sep.length()); } } }
LabelProvider uses OrcSyntaxConvertable trait
OrcEclipse/src/edu/utexas/cs/orc/orceclipse/edit/OrcLabelProvider.java
LabelProvider uses OrcSyntaxConvertable trait
<ide><path>rcEclipse/src/edu/utexas/cs/orc/orceclipse/edit/OrcLabelProvider.java <ide> import java.util.Set; <ide> <ide> import orc.ast.AST; <add>import orc.ast.OrcSyntaxConvertable; <ide> import orc.ast.ext.ClassImport; <ide> import orc.ast.ext.Def; <ide> import orc.ast.ext.DefCapsule; <ide> for (final scala.collection.immutable.List<Pattern> ps : JavaConversions.asIterable(d.formals())) { <ide> s.append('('); <ide> if (ps != null) { <del> s.append(listMkOrcString(JavaConversions.asIterable(ps), ",")); //$NON-NLS-1$ <add> s.append(listMkString(JavaConversions.asIterable(ps), ", ")); //$NON-NLS-1$ <ide> } <ide> s.append(')'); <ide> } <ide> for (final scala.collection.immutable.List<Pattern> ps : JavaConversions.asIterable(d.formals())) { <ide> s.append('('); <ide> if (ps != null) { <del> s.append(listMkOrcString(JavaConversions.asIterable(ps), ",")); //$NON-NLS-1$ <add> s.append(listMkString(JavaConversions.asIterable(ps), ", ")); //$NON-NLS-1$ <ide> } <ide> s.append(')'); <ide> } <ide> <ide> s.append(d.name()); <ide> <del> if (d.typeformals() != null) { <add> if (d.typeformals() != null && d.typeformals().size() > 0) { <ide> s.append('['); <ide> s.append(listMkString(JavaConversions.asIterable(d.typeformals()), ", ")); //$NON-NLS-1$ <ide> s.append(']'); <ide> for (final scala.collection.immutable.List<Type> argTypes : JavaConversions.asIterable(d.argtypes())) { <ide> s.append('('); <ide> if (argTypes != null) { <del> s.append(listMkString(JavaConversions.asIterable(argTypes), ",")); //$NON-NLS-1$ <add> s.append(listMkString(JavaConversions.asIterable(argTypes), ", ")); //$NON-NLS-1$ <ide> } <ide> s.append(')'); <ide> } <ide> <ide> s.append(" :: "); //$NON-NLS-1$ <del> s.append(d.returntype()); <add> s.append(d.returntype().toOrcSyntax()); <ide> <ide> return s.toString(); <ide> } <ide> private static String listMkString(final Iterable<?> theList, final String sep) { <ide> final StringBuilder sb = new StringBuilder(); <ide> for (final Object o : theList) { <del> sb.append(o.toString()); <add> if (o instanceof OrcSyntaxConvertable) { <add> sb.append(((OrcSyntaxConvertable) o).toOrcSyntax()); <add> } else { <add> sb.append(o.toString()); <add> } <ide> sb.append(sep); <ide> } <ide> if (sb.length() == 0) { <ide> } <ide> } <ide> <del> private static String listMkOrcString(final Iterable<Pattern> theList, final String sep) { <del> //TODO: As toOrcSyntax() is available on more general types than, Pattern update types in here <del> final StringBuilder sb = new StringBuilder(); <del> for (final Pattern p : theList) { <del> sb.append(p.toOrcSyntax()); <del> sb.append(sep); <del> } <del> if (sb.length() == 0) { <del> return ""; //$NON-NLS-1$ <del> } else { <del> return sb.substring(0, sb.length() - sep.length()); <del> } <del> } <del> <ide> }
Java
apache-2.0
b5957481580f68cc46d757747fdfd3db36df7814
0
ador/ProteinPatternSearch
package protka; import java.util.ArrayList; import java.util.List; public class Protein { private List<String> lines = new ArrayList<String>(); private String sequence = ""; private String acNum; private List<SequencePart> tmDomains; private List<SequencePart> inOutDomains; private List<SequencePart> funcDomains; private static int MINLENGTH; private static int MAXLENGTH; private List<SequencePart> allDomains; private List<String> taxonomy; public String getAcNum() { return acNum; } public static void setMinLength(int length) { MINLENGTH = length; } public static void setMaxLength(int length) { MAXLENGTH = length; } public Boolean getBeginsInside() { return beginsInside; } public void setBeginsInside(boolean beginsInside) { this.beginsInside = new Boolean(beginsInside); } private Boolean beginsInside = null; public List<String> getLines() { return lines; } public void setLines(List<String> data) { lines.clear(); sequence = ""; boolean seqReading = false; for (String dataPart: data) { String[] linesRead = dataPart.split("\\r?\\n"); for (String l : linesRead){ if (seqReading && !l.startsWith("//")) { String seqPart = l.trim(); String s2 = seqPart.replaceAll(" ", ""); sequence = sequence + s2; } lines.add(dataPart); if (l.startsWith("SQ")) { seqReading = true; } if (l.startsWith("AC")) { String id = dataPart.split(" ")[1]; acNum = id.split(";")[0]; } } } readDomains(); } // returns the first matching line, or null if no match public String getLine(String startsWith) { for (String line : lines) { if (line.startsWith(startsWith)) { return line; } } return null; } // returns all matching lines, or empty list if no match public List<String> getLines(String startsWith, String contains) { ArrayList<String> ret = new ArrayList<String>(); for (String line : lines) { if (line.startsWith(startsWith) && line.contains(contains)) { ret.add(line); } } return ret; } public String getSequence() { return sequence; } public List<SequencePart> getTmDomains() { return tmDomains; } public List<SequencePart> getFuncDomains() { return funcDomains; } private ArrayList<SequencePart> parseDomainParts(List<String> lines, String type) { ArrayList<SequencePart> ret = new ArrayList<SequencePart>(); for (String line : lines) { String fromStr = line.substring(15, 20).trim(); String toStr = line.substring(21, 27).trim(); if (fromStr.contains("?") || toStr.contains("?")) { continue; } if (fromStr.startsWith("<") || fromStr.startsWith(">")) { fromStr = fromStr.substring(1); } if (toStr.startsWith("<") || toStr.startsWith(">")) { toStr = toStr.substring(1); } // -1 because the data file indexes letters in strings starting with 1 int from = Integer.parseInt(fromStr) - 1; int to = Integer.parseInt(toStr) - 1; ret.add(new SequencePart(from, to, type)); } return ret; } private void readDomains() { if (tmDomains == null || allDomains == null) { tmDomains = new ArrayList<SequencePart>(); inOutDomains = new ArrayList<SequencePart>(); funcDomains = new ArrayList<SequencePart>(); allDomains = new ArrayList<SequencePart>(); } else { tmDomains.clear(); inOutDomains.clear(); funcDomains.clear(); allDomains.clear(); } readTmDomains(); readInOutDomains(); readFuncDomains(); } private void readTmDomains() { List<String> ftLines = getLines("FT TRANSMEM" , ""); tmDomains = parseDomainParts(ftLines, "TM"); allDomains.addAll(tmDomains); } private void readFuncDomains() { List<String> ftLines = getLines("FT DOMAIN" , ""); if (ftLines.isEmpty()) { return; } funcDomains = parseDomainParts(ftLines, "DOMAIN"); allDomains.addAll(funcDomains); } private 
void readInOutDomains() { List<String> innerDomainLines = getLines("FT TOPO_DOM", "Cytoplasmic"); List<SequencePart> inDomains = parseDomainParts(innerDomainLines, "IN"); List<String> outerDomainLines = getLines("FT TOPO_DOM", "Extracellular"); List<SequencePart> outDomains = parseDomainParts(outerDomainLines, "OUT"); inOutDomains.addAll(inDomains); inOutDomains.addAll(outDomains); allDomains.addAll(inDomains); allDomains.addAll(outDomains); int firstIn = Integer.MAX_VALUE; int firstOut = Integer.MAX_VALUE; if (!inDomains.isEmpty()) { firstIn = inDomains.get(0).getFrom(); } if (!outDomains.isEmpty()) { firstOut = outDomains.get(0).getFrom(); } if (firstIn < firstOut) { setBeginsInside(true); } else if (firstOut < Integer.MAX_VALUE) { setBeginsInside(false); } } // indexing goes from 0 ! returns inclusive public String getSequencePart(int from, int to) { String sequence = getSequence(); if (to < sequence.length() && from >= 0 && from <= to) { String ret = sequence.substring(from, to + 1); return ret; } return null; } private SequencePart getForwardSection(int i) { int tmBeginning = tmDomains.get(i).getFrom(); int tmEnding = tmDomains.get(i).getTo(); // looking forwards from tm part int externalBeginning = tmEnding + 1; int externalEnding; if (i == tmDomains.size() - 1) { // the last part.. checking against the end of the whole sequence externalEnding = getSequence().length(); } else { externalEnding = tmDomains.get(i + 1).getFrom() - 1; } int diff = externalEnding - externalBeginning; if (diff >= MINLENGTH - 1) { if (diff >= MAXLENGTH - 1) { return new SequencePart(tmBeginning, tmEnding + MAXLENGTH, "TM"); } else { return new SequencePart(tmBeginning, externalEnding, "TM"); } } return null; } private SequencePart getBackwardSection(int i) { int tmBeginning = tmDomains.get(i).getFrom(); int tmEnding = tmDomains.get(i).getTo(); // looking backwards from tm part int externalBeginning; int externalEnding = tmBeginning - 1; if (i == 0) { // potentially beginning at the very beginning of the whole seq. 
externalBeginning = 0; } else { externalBeginning = tmDomains.get(i - 1).getTo() + 1; } int diff = externalEnding - externalBeginning; if (diff >= MINLENGTH - 1) { if (diff >= MAXLENGTH - 1) { return new SequencePart(tmBeginning - MAXLENGTH, tmEnding, "TM"); } else { return new SequencePart(externalBeginning, tmEnding, "TM"); } } return null; } public SequencePart getSeqForTmPart(int i) { getTmDomains(); if ((!beginsInside && i % 2 == 1 || beginsInside && i % 2 == 0)) { return getForwardSection(i); } else { return getBackwardSection(i); } } public boolean hasTmOrientationInfo() { if (!inOutDomains.isEmpty()) { return true; } return false; } public SequencePart getNearestDomain(int posInSeq, int maxDist) { if (funcDomains.isEmpty()) { return null; } int minDist = maxDist; SequencePart ret = null; for (SequencePart sqp: funcDomains) { if (Math.abs(sqp.getFrom() - posInSeq) < minDist) { minDist = Math.abs(sqp.getFrom() - posInSeq); ret = sqp; } if (Math.abs(sqp.getTo() - posInSeq) < minDist) { minDist = Math.abs(sqp.getTo() - posInSeq); ret = sqp; } } if (minDist <= maxDist) { return ret; } return null; } public void readTaxonomy() { List<String> ocLines = getLines("OC", ""); taxonomy = new ArrayList<String>(); for (String line: ocLines) { String lineContent = line.substring(3); String[] sArray = lineContent.split(";"); for (String s: sArray) { if (s.trim().length() > 0) taxonomy.add(s.trim()); } } } public List<String> getTaxonomyList() { if (taxonomy == null) { readTaxonomy(); } return taxonomy; } public String getSpecies() { List<String> osLines = getLines("OS", ""); String species = ""; for (String line: osLines) { String lineContent = line.substring(3).trim(); species += lineContent; } return species; } }
java/src/main/java/protka/Protein.java
package protka; import java.util.ArrayList; import java.util.List; public class Protein { private List<String> lines = new ArrayList<String>(); private String sequence = ""; private String acNum; private List<SequencePart> tmDomains; private List<SequencePart> inOutDomains; private List<SequencePart> funcDomains; private static int MINLENGTH; private static int MAXLENGTH; private List<SequencePart> allDomains; private List<String> taxonomy; public String getAcNum() { return acNum; } public static void setMinLength(int length) { MINLENGTH = length; } public static void setMaxLength(int length) { MAXLENGTH = length; } public Boolean getBeginsInside() { return beginsInside; } public void setBeginsInside(boolean beginsInside) { this.beginsInside = new Boolean(beginsInside); } private Boolean beginsInside = null; public List<String> getLines() { return lines; } public void setLines(List<String> data) { lines.clear(); sequence = ""; boolean seqReading = false; for (String dataPart: data) { String[] linesRead = dataPart.split("\\r?\\n"); for (String l : linesRead){ if (seqReading && !l.startsWith("//")) { String seqPart = l.trim(); String s2 = seqPart.replaceAll(" ", ""); sequence = sequence + s2; } lines.add(dataPart); if (l.startsWith("SQ")) { seqReading = true; } if (l.startsWith("AC")) { String id = dataPart.split(" ")[1]; acNum = id.split(";")[0]; } } } readDomains(); } // returns the first matching line, or null if no match public String getLine(String startsWith) { for (String line : lines) { if (line.startsWith(startsWith)) { return line; } } return null; } // returns all matching lines, or empty list if no match public List<String> getLines(String startsWith, String contains) { ArrayList<String> ret = new ArrayList<String>(); for (String line : lines) { if (line.startsWith(startsWith) && line.contains(contains)) { ret.add(line); } } return ret; } public String getSequence() { return sequence; } public List<SequencePart> getTmNumbers() { return tmDomains; } public List<SequencePart> getFuncDomains() { return funcDomains; } private ArrayList<SequencePart> parseDomainParts(List<String> lines, String type) { ArrayList<SequencePart> ret = new ArrayList<SequencePart>(); for (String line : lines) { String fromStr = line.substring(15, 20).trim(); String toStr = line.substring(21, 27).trim(); if (fromStr.contains("?") || toStr.contains("?")) { continue; } if (fromStr.startsWith("<") || fromStr.startsWith(">")) { fromStr = fromStr.substring(1); } if (toStr.startsWith("<") || toStr.startsWith(">")) { toStr = toStr.substring(1); } // -1 because the data file indexes letters in strings starting with 1 int from = Integer.parseInt(fromStr) - 1; int to = Integer.parseInt(toStr) - 1; ret.add(new SequencePart(from, to, type)); } return ret; } private void readDomains() { if (tmDomains == null || allDomains == null) { tmDomains = new ArrayList<SequencePart>(); inOutDomains = new ArrayList<SequencePart>(); funcDomains = new ArrayList<SequencePart>(); allDomains = new ArrayList<SequencePart>(); } else { tmDomains.clear(); inOutDomains.clear(); funcDomains.clear(); allDomains.clear(); } readTmDomains(); readInOutDomains(); readFuncDomains(); } private void readTmDomains() { List<String> ftLines = getLines("FT TRANSMEM" , ""); tmDomains = parseDomainParts(ftLines, "TM"); allDomains.addAll(tmDomains); } private void readFuncDomains() { List<String> ftLines = getLines("FT DOMAIN" , ""); if (ftLines.isEmpty()) { return; } funcDomains = parseDomainParts(ftLines, "DOMAIN"); allDomains.addAll(funcDomains); } private 
void readInOutDomains() { List<String> innerDomainLines = getLines("FT TOPO_DOM", "Cytoplasmic"); List<SequencePart> inDomains = parseDomainParts(innerDomainLines, "IN"); List<String> outerDomainLines = getLines("FT TOPO_DOM", "Extracellular"); List<SequencePart> outDomains = parseDomainParts(outerDomainLines, "OUT"); inOutDomains.addAll(inDomains); inOutDomains.addAll(outDomains); allDomains.addAll(inDomains); allDomains.addAll(outDomains); int firstIn = Integer.MAX_VALUE; int firstOut = Integer.MAX_VALUE; if (!inDomains.isEmpty()) { firstIn = inDomains.get(0).getFrom(); } if (!outDomains.isEmpty()) { firstOut = outDomains.get(0).getFrom(); } if (firstIn < firstOut) { setBeginsInside(true); } else if (firstOut < Integer.MAX_VALUE) { setBeginsInside(false); } } // indexing goes from 0 ! returns inclusive public String getSequencePart(int from, int to) { String sequence = getSequence(); if (to < sequence.length() && from >= 0 && from <= to) { String ret = sequence.substring(from, to + 1); return ret; } return null; } private SequencePart getForwardSection(int i) { int tmBeginning = tmDomains.get(i).getFrom(); int tmEnding = tmDomains.get(i).getTo(); // looking forwards from tm part int externalBeginning = tmEnding + 1; int externalEnding; if (i == tmDomains.size() - 1) { // the last part.. checking against the end of the whole sequence externalEnding = getSequence().length(); } else { externalEnding = tmDomains.get(i + 1).getFrom() - 1; } int diff = externalEnding - externalBeginning; if (diff >= MINLENGTH - 1) { if (diff >= MAXLENGTH - 1) { return new SequencePart(tmBeginning, tmEnding + MAXLENGTH, "TM"); } else { return new SequencePart(tmBeginning, externalEnding, "TM"); } } return null; } private SequencePart getBackwardSection(int i) { int tmBeginning = tmDomains.get(i).getFrom(); int tmEnding = tmDomains.get(i).getTo(); // looking backwards from tm part int externalBeginning; int externalEnding = tmBeginning - 1; if (i == 0) { // potentially beginning at the very beginning of the whole seq. 
externalBeginning = 0; } else { externalBeginning = tmDomains.get(i - 1).getTo() + 1; } int diff = externalEnding - externalBeginning; if (diff >= MINLENGTH - 1) { if (diff >= MAXLENGTH - 1) { return new SequencePart(tmBeginning - MAXLENGTH, tmEnding, "TM"); } else { return new SequencePart(externalBeginning, tmEnding, "TM"); } } return null; } public SequencePart getSeqForTmPart(int i) { getTmNumbers(); if ((!beginsInside && i % 2 == 1 || beginsInside && i % 2 == 0)) { return getForwardSection(i); } else { return getBackwardSection(i); } } public boolean hasTmOrientationInfo() { if (!inOutDomains.isEmpty()) { return true; } return false; } public SequencePart getNearestDomain(int posInSeq, int maxDist) { if (funcDomains.isEmpty()) { return null; } int minDist = maxDist; SequencePart ret = null; for (SequencePart sqp: funcDomains) { if (Math.abs(sqp.getFrom() - posInSeq) < minDist) { minDist = Math.abs(sqp.getFrom() - posInSeq); ret = sqp; } if (Math.abs(sqp.getTo() - posInSeq) < minDist) { minDist = Math.abs(sqp.getTo() - posInSeq); ret = sqp; } } if (minDist <= maxDist) { return ret; } return null; } public void readTaxonomy() { List<String> ocLines = getLines("OC", ""); taxonomy = new ArrayList<String>(); for (String line: ocLines) { String lineContent = line.substring(3); String[] sArray = lineContent.split(";"); for (String s: sArray) { if (s.trim().length() > 0) taxonomy.add(s.trim()); } } } public List<String> getTaxonomyList() { if (taxonomy == null) { readTaxonomy(); } return taxonomy; } public String getSpecies() { List<String> osLines = getLines("OS", ""); String species = ""; for (String line: osLines) { String lineContent = line.substring(3).trim(); species += lineContent; } return species; } }
Protein refactor
java/src/main/java/protka/Protein.java
Protein refactor
<ide><path>ava/src/main/java/protka/Protein.java <ide> return sequence; <ide> } <ide> <del> public List<SequencePart> getTmNumbers() { <add> public List<SequencePart> getTmDomains() { <ide> return tmDomains; <ide> } <ide> <ide> } <ide> <ide> public SequencePart getSeqForTmPart(int i) { <del> getTmNumbers(); <add> getTmDomains(); <ide> <ide> if ((!beginsInside && i % 2 == 1 || beginsInside && i % 2 == 0)) { <ide> return getForwardSection(i);
Java
mit
a82c399a56d0428d7d4a934bee0a49978cb9b425
0
trendrr/java-oss-lib,MarkG/java-oss-lib
/** * */ package com.trendrr.oss.executionreport; import java.util.Date; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import com.trendrr.oss.StringHelper; /** * @author Dustin Norlander * @created Oct 11, 2012 * */ public class ExecutionSubReport implements ExecutionReportIncrementor { protected static Log log = LogFactory.getLog(ExecutionSubReport.class); ExecutionReport report; String namespace; public ExecutionSubReport(String namespace, ExecutionReport report) { this.report = report; this.namespace = StringHelper.trim(namespace, "."); } /** * * will return null when it reaches the top. * * @return */ public ExecutionReportIncrementor getParent() { int ind = namespace.lastIndexOf('.'); if (ind == -1) { return report; } return new ExecutionSubReport(namespace.substring(0, ind), report); } protected String getKey(String key) { if (key == null || key.isEmpty()) { return this.namespace; } return this.namespace + "." + key; } /* (non-Javadoc) * @see com.trendrr.oss.executionreport.ExReport#inc(java.lang.String, long, java.util.Date) */ @Override public void inc(String key, long amount, Date start) { this.report.inc(this.getKey(key), amount, start); } /* (non-Javadoc) * @see com.trendrr.oss.executionreport.ExReport#inc(java.lang.String, long, long) */ @Override public void inc(String key, long amount, long millis) { this.report.inc(this.getKey(key), amount, millis); } /* (non-Javadoc) * @see com.trendrr.oss.executionreport.ExReport#inc(java.lang.String, long) */ @Override public void inc(String key, long amount) { this.report.inc(this.getKey(key), amount); } /* (non-Javadoc) * @see com.trendrr.oss.executionreport.ExReport#inc(java.lang.String, java.util.Date) */ @Override public void inc(String key, Date start) { this.report.inc(this.getKey(key), start); } /* (non-Javadoc) * @see com.trendrr.oss.executionreport.ExReport#inc(java.lang.String) */ @Override public void inc(String key) { this.report.inc(this.getKey(key)); } /* (non-Javadoc) * @see com.trendrr.oss.executionreport.ExecutionReportIncrementor#inc(long, java.util.Date) */ @Override public void inc(long amount, Date start) { this.inc("", amount, start); } /* (non-Javadoc) * @see com.trendrr.oss.executionreport.ExecutionReportIncrementor#inc(long, long) */ @Override public void inc(long amount, long millis) { this.inc("", amount, millis); } /* (non-Javadoc) * @see com.trendrr.oss.executionreport.ExecutionReportIncrementor#inc(long) */ @Override public void inc(long amount) { this.inc("", amount); } /* (non-Javadoc) * @see com.trendrr.oss.executionreport.ExecutionReportIncrementor#inc(java.util.Date) */ @Override public void inc(Date start) { this.inc("", start); } /* (non-Javadoc) * @see com.trendrr.oss.executionreport.ExecutionReportIncrementor#inc() */ @Override public void inc() { this.inc(""); } /* (non-Javadoc) * @see com.trendrr.oss.executionreport.ExecutionReportIncrementor#getChild(java.lang.String) */ @Override public ExecutionReportIncrementor getChild(String key) { return new ExecutionSubReport(this.namespace + "." + key, this.report); } }
src/main/com/trendrr/oss/executionreport/ExecutionSubReport.java
/**
 * 
 */
package com.trendrr.oss.executionreport;

import java.util.Date;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.trendrr.oss.StringHelper;

/**
 * @author Dustin Norlander
 * @created Oct 11, 2012
 * 
 */
public class ExecutionSubReport implements ExecutionReportIncrementor {

	protected static Log log = LogFactory.getLog(ExecutionSubReport.class);

	ExecutionReport report;
	String namespace;

	public ExecutionSubReport(String namespace, ExecutionReport report) {
		this.report = report;
		this.namespace = StringHelper.trim(namespace, ".");
	}

	/**
	 * 
	 * will return null when it reaches the top.
	 * 
	 * @return
	 */
	public ExecutionReportIncrementor getParent() {
		int ind = namespace.lastIndexOf('.');
		if (ind == -1) {
			return report;
		}
		return new ExecutionSubReport(namespace.substring(0, ind), report);
	}

	protected String getKey(String key) {
		return this.namespace + "." + key;
	}

	/* (non-Javadoc)
	 * @see com.trendrr.oss.executionreport.ExReport#inc(java.lang.String, long, java.util.Date)
	 */
	@Override
	public void inc(String key, long amount, Date start) {
		this.report.inc(this.getKey(key), amount, start);
	}

	/* (non-Javadoc)
	 * @see com.trendrr.oss.executionreport.ExReport#inc(java.lang.String, long, long)
	 */
	@Override
	public void inc(String key, long amount, long millis) {
		this.report.inc(this.getKey(key), amount, millis);
	}

	/* (non-Javadoc)
	 * @see com.trendrr.oss.executionreport.ExReport#inc(java.lang.String, long)
	 */
	@Override
	public void inc(String key, long amount) {
		this.report.inc(this.getKey(key), amount);
	}

	/* (non-Javadoc)
	 * @see com.trendrr.oss.executionreport.ExReport#inc(java.lang.String, java.util.Date)
	 */
	@Override
	public void inc(String key, Date start) {
		this.report.inc(this.getKey(key), start);
	}

	/* (non-Javadoc)
	 * @see com.trendrr.oss.executionreport.ExReport#inc(java.lang.String)
	 */
	@Override
	public void inc(String key) {
		this.report.inc(this.getKey(key));
	}

	/* (non-Javadoc)
	 * @see com.trendrr.oss.executionreport.ExecutionReportIncrementor#inc(long, java.util.Date)
	 */
	@Override
	public void inc(long amount, Date start) {
		this.inc("", amount, start);
	}

	/* (non-Javadoc)
	 * @see com.trendrr.oss.executionreport.ExecutionReportIncrementor#inc(long, long)
	 */
	@Override
	public void inc(long amount, long millis) {
		this.inc("", amount, millis);
	}

	/* (non-Javadoc)
	 * @see com.trendrr.oss.executionreport.ExecutionReportIncrementor#inc(long)
	 */
	@Override
	public void inc(long amount) {
		this.inc("", amount);
	}

	/* (non-Javadoc)
	 * @see com.trendrr.oss.executionreport.ExecutionReportIncrementor#inc(java.util.Date)
	 */
	@Override
	public void inc(Date start) {
		this.inc("", start);
	}

	/* (non-Javadoc)
	 * @see com.trendrr.oss.executionreport.ExecutionReportIncrementor#inc()
	 */
	@Override
	public void inc() {
		this.inc("");
	}

	/* (non-Javadoc)
	 * @see com.trendrr.oss.executionreport.ExecutionReportIncrementor#getChild(java.lang.String)
	 */
	@Override
	public ExecutionReportIncrementor getChild(String key) {
		return new ExecutionSubReport(this.namespace + "." + key, this.report);
	}
}
correctly handle empty key
src/main/com/trendrr/oss/executionreport/ExecutionSubReport.java
correctly handle empty key
<ide><path>rc/main/com/trendrr/oss/executionreport/ExecutionSubReport.java <ide> } <ide> <ide> protected String getKey(String key) { <add> if (key == null || key.isEmpty()) { <add> return this.namespace; <add> } <ide> return this.namespace + "." + key; <ide> } <ide> /* (non-Javadoc)
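The "correctly handle empty key" commit above changes how ExecutionSubReport.getKey builds counter keys when the key is empty. A minimal standalone sketch of that behavior follows; it restates the patched getKey logic locally rather than calling the trendrr-oss library (constructing an ExecutionReport is outside the scope of this diff), so the class name and main method here are illustrative only.

// Standalone sketch of the fixed key derivation; mirrors ExecutionSubReport.getKey
// rather than invoking the library, so it runs without trendrr-oss on the classpath.
public class GetKeyBehavior {

	// Same logic as the patched ExecutionSubReport.getKey(String)
	static String getKey(String namespace, String key) {
		if (key == null || key.isEmpty()) {
			return namespace;             // fixed: an empty key maps to the namespace itself
		}
		return namespace + "." + key;     // unchanged: non-empty keys are dot-appended
	}

	public static void main(String[] args) {
		// Keyless increments such as inc() delegate to inc("") in ExecutionSubReport,
		// so the empty-key path is hit by every keyless call.
		System.out.println(getKey("db.writes", ""));        // "db.writes" (was "db.writes." before the fix)
		System.out.println(getKey("db.writes", "failed"));  // "db.writes.failed"
	}
}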
Java
bsd-2-clause
759d57f33b939eaa3ee0be316de30cef968a3d2d
0
scifio/scifio
/* * #%L * SCIFIO library for reading and converting scientific file formats. * %% * Copyright (C) 2011 - 2015 Board of Regents of the University of * Wisconsin-Madison * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * #L% */ package io.scif.formats; import io.scif.AbstractChecker; import io.scif.AbstractFormat; import io.scif.AbstractMetadata; import io.scif.AbstractParser; import io.scif.ByteArrayPlane; import io.scif.ByteArrayReader; import io.scif.FilePattern; import io.scif.Format; import io.scif.FormatException; import io.scif.HasColorTable; import io.scif.ImageMetadata; import io.scif.MetadataLevel; import io.scif.UnsupportedCompressionException; import io.scif.codec.Codec; import io.scif.codec.CodecOptions; import io.scif.codec.CodecService; import io.scif.codec.JPEG2000Codec; import io.scif.codec.JPEGCodec; import io.scif.codec.PackbitsCodec; import io.scif.common.DataTools; import io.scif.config.SCIFIOConfig; import io.scif.io.Location; import io.scif.io.RandomAccessInputStream; import io.scif.services.InitializeService; import io.scif.util.FormatTools; import java.io.IOException; import java.util.Arrays; import java.util.Hashtable; import java.util.Vector; import net.imagej.axis.Axes; import net.imglib2.display.ColorTable; import net.imglib2.display.ColorTable16; import net.imglib2.display.ColorTable8; import org.scijava.plugin.Parameter; import org.scijava.plugin.Plugin; /** * DICOMReader is the file format reader for DICOM files. Much of this code is * adapted from <a * href="http://imagej.net/developer/source/ij/plugin/DICOM.java.html">ImageJ's * DICOM reader</a>. * * @author Mark Hiner */ @Plugin(type = Format.class, name = "DICOM") public class DICOMFormat extends AbstractFormat { // -- Constants -- public static final String DICOM_MAGIC_STRING = "DICM"; private static final Hashtable<Integer, String> TYPES = buildTypes(); // -- AbstractFormat Methods -- @Override protected String[] makeSuffixArray() { return new String[] { "dic", "dcm", "dicom", "jp2", "j2ki", "j2kr", "raw", "ima" }; } // -- Static Helper methods -- /** * Assemble the data dictionary. This is incomplete at best, since there are * literally thousands of fields defined by the DICOM specifications. 
*/ private static Hashtable<Integer, String> buildTypes() { final Hashtable<Integer, String> dict = new Hashtable<Integer, String>(); dict.put(0x00020002, "Media Storage SOP Class UID"); dict.put(0x00020003, "Media Storage SOP Instance UID"); dict.put(0x00020010, "Transfer Syntax UID"); dict.put(0x00020012, "Implementation Class UID"); dict.put(0x00020013, "Implementation Version Name"); dict.put(0x00020016, "Source Application Entity Title"); dict.put(0x00080005, "Specific Character Set"); dict.put(0x00080008, "Image Type"); dict.put(0x00080010, "Recognition Code"); dict.put(0x00080012, "Instance Creation Date"); dict.put(0x00080013, "Instance Creation Time"); dict.put(0x00080014, "Instance Creator UID"); dict.put(0x00080016, "SOP Class UID"); dict.put(0x00080018, "SOP Instance UID"); dict.put(0x0008001a, "Related General SOP Class UID"); dict.put(0x0008001b, "Original Specialized SOP Class UID"); dict.put(0x00080020, "Study Date"); dict.put(0x00080021, "Series Date"); dict.put(0x00080022, "Acquisition Date"); dict.put(0x00080023, "Content Date"); dict.put(0x00080024, "Overlay Date"); dict.put(0x00080025, "Curve Date"); dict.put(0x0008002a, "Acquisition Date/Time"); dict.put(0x00080030, "Study Time"); dict.put(0x00080031, "Series Time"); dict.put(0x00080032, "Acquisition Time"); dict.put(0x00080033, "Content Time"); dict.put(0x00080034, "Overlay Time"); dict.put(0x00080035, "Curve Time"); dict.put(0x00080041, "Data Set Subtype"); dict.put(0x00080050, "Accession Number"); dict.put(0x00080052, "Query/Retrieve Level"); dict.put(0x00080054, "Retrieve AE Title"); dict.put(0x00080056, "Instance Availability"); dict.put(0x00080058, "Failed SOP Instance UID List"); dict.put(0x00080060, "Modality"); dict.put(0x00080061, "Modalities in Study"); dict.put(0x00080062, "SOP Classes in Study"); dict.put(0x00080064, "Conversion Type"); dict.put(0x00080068, "Presentation Intent Type"); dict.put(0x00080070, "Manufacturer"); dict.put(0x00080080, "Institution Name"); dict.put(0x00080081, "Institution Address"); dict.put(0x00080082, "Institution Code Sequence"); dict.put(0x00080090, "Referring Physician's Name"); dict.put(0x00080092, "Referring Physician's Address"); dict.put(0x00080094, "Referring Physician's Telephone"); dict.put(0x00080096, "Referring Physician ID"); dict.put(0x00080100, "Code Value"); dict.put(0x00080102, "Coding Scheme Designator"); dict.put(0x00080103, "Coding Scheme Version"); dict.put(0x00080104, "Code Meaning"); dict.put(0x00080105, "Mapping Resource"); dict.put(0x00080106, "Context Group Version"); dict.put(0x00080107, "Context Group Local Version"); dict.put(0x0008010b, "Context Group Extension Flag"); dict.put(0x0008010c, "Coding Scheme UID"); dict.put(0x0008010d, "Context Group Extension Creator UID"); dict.put(0x0008010f, "Context ID"); dict.put(0x00080110, "Coding Scheme ID"); dict.put(0x00080112, "Coding Scheme Registry"); dict.put(0x00080114, "Coding Scheme External ID"); dict.put(0x00080115, "Coding Scheme Name"); dict.put(0x00080116, "Responsible Organization"); dict.put(0x00080201, "Timezone Offset from UTC"); dict.put(0x00081010, "Station Name"); dict.put(0x00081030, "Study Description"); dict.put(0x00081032, "Procedure Code Sequence"); dict.put(0x0008103e, "Series Description"); dict.put(0x00081040, "Institutional Department Name"); dict.put(0x00081048, "Physician(s) of Record"); dict.put(0x00081049, "Physician(s) of Record ID"); dict.put(0x00081050, "Performing Physician's Name"); dict.put(0x00081052, "Performing Physican ID"); dict.put(0x00081060, "Name of Physician(s) 
Reading Study"); dict.put(0x00081062, "Physician(s) Reading Study ID"); dict.put(0x00081070, "Operator's Name"); dict.put(0x00081072, "Operator ID"); dict.put(0x00081080, "Admitting Diagnoses Description"); dict.put(0x00081084, "Admitting Diagnoses Code Sequence"); dict.put(0x00081090, "Manufacturer's Model Name"); dict.put(0x00081100, "Referenced Results Sequence"); dict.put(0x00081110, "Referenced Study Sequence"); dict.put(0x00081111, "Referenced Performed Procedure Step"); dict.put(0x00081115, "Referenced Series Sequence"); dict.put(0x00081120, "Referenced Patient Sequence"); dict.put(0x00081125, "Referenced Visit Sequence"); dict.put(0x00081130, "Referenced Overlay Sequence"); dict.put(0x0008113a, "Referenced Waveform Sequence"); dict.put(0x00081140, "Referenced Image Sequence"); dict.put(0x00081145, "Referenced Curve Sequence"); dict.put(0x0008114a, "Referenced Instance Sequence"); dict.put(0x00081150, "Referenced SOP Class UID"); dict.put(0x00081155, "Referenced SOP Instance UID"); dict.put(0x0008115a, "SOP Classes Supported"); dict.put(0x00081160, "Referenced Frame Number"); dict.put(0x00081195, "Transaction UID"); dict.put(0x00081197, "Failure Reason"); dict.put(0x00081198, "Failed SOP Sequence"); dict.put(0x00081199, "Referenced SOP Sequence"); dict.put(0x00081200, "Studies Containing Other Referenced Instances Sequence"); dict.put(0x00081250, "Related Series Sequence"); dict.put(0x00082111, "Derivation Description"); dict.put(0x00082112, "Source Image Sequence"); dict.put(0x00082120, "Stage Name"); dict.put(0x00082122, "Stage Number"); dict.put(0x00082124, "Number of Stages"); dict.put(0x00082127, "View Name"); dict.put(0x00082128, "View Number"); dict.put(0x00082129, "Number of Event Timers"); dict.put(0x0008212a, "Number of Views in Stage"); dict.put(0x00082130, "Event Elapsed Time(s)"); dict.put(0x00082132, "Event Timer Name(s)"); dict.put(0x00082142, "Start Trim"); dict.put(0x00082143, "Stop Trim"); dict.put(0x00082144, "Recommended Display Frame Rate"); dict.put(0x00082218, "Anatomic Region Sequence"); dict.put(0x00082220, "Anatomic Region Modifier Sequence"); dict.put(0x00082228, "Primary Anatomic Structure Sequence"); dict.put(0x00082229, "Anatomic Structure Sequence"); dict.put(0x00082230, "Primary Anatomic Structure Modifier"); dict.put(0x00082240, "Transducer Position Sequence"); dict.put(0x00082242, "Transducer Position Modifier Sequence"); dict.put(0x00082244, "Transducer Orientation Sequence"); dict.put(0x00082246, "Transducer Orientation Modifier"); dict.put(0x00083001, "Alternate Representation Sequence"); dict.put(0x00089007, "Frame Type"); dict.put(0x00089092, "Referenced Image Evidence Sequence"); dict.put(0x00089121, "Referenced Raw Data Sequence"); dict.put(0x00089123, "Creator-Version UID"); dict.put(0x00089124, "Derivation Image Sequence"); dict.put(0x00089154, "Source Image Evidence Sequence"); dict.put(0x00089205, "Pixel Representation"); dict.put(0x00089206, "Volumetric Properties"); dict.put(0x00089207, "Volume Based Calculation Technique"); dict.put(0x00089208, "Complex Image Component"); dict.put(0x00089209, "Acquisition Contrast"); dict.put(0x00089215, "Derivation Code Sequence"); dict.put(0x00089237, "Reference Grayscale Presentation State"); dict.put(0x00100010, "Patient's Name"); dict.put(0x00100020, "Patient ID"); dict.put(0x00100021, "Issuer of Patient ID"); dict.put(0x00100030, "Patient's Birth Date"); dict.put(0x00100032, "Patient's Birth Time"); dict.put(0x00100040, "Patient's Sex"); dict.put(0x00100050, "Patient's Insurance Plane Code"); 
dict.put(0x00100101, "Patient's Primary Language Code"); dict.put(0x00100102, "Patient's Primary Language Modifier"); dict.put(0x00101000, "Other Patient IDs"); dict.put(0x00101001, "Other Patient Names"); dict.put(0x00101005, "Patient's Birth Name"); dict.put(0x00101010, "Patient's Age"); dict.put(0x00101020, "Patient's Size"); dict.put(0x00101030, "Patient's Weight"); dict.put(0x00101040, "Patient's Address"); dict.put(0x00101060, "Patient's Mother's Birth Name"); dict.put(0x00101080, "Military Rank"); dict.put(0x00101081, "Branch of Service"); dict.put(0x00101090, "Medical Record Locator"); dict.put(0x00102000, "Medical Alerts"); dict.put(0x00102110, "Contrast Allergies"); dict.put(0x00102150, "Country of Residence"); dict.put(0x00102152, "Region of Residence"); dict.put(0x00102154, "Patient's Telephone Numbers"); dict.put(0x00102160, "Ethnic Group"); dict.put(0x00102180, "Occupation"); dict.put(0x001021a0, "Smoking Status"); dict.put(0x001021b0, "Additional Patient History"); dict.put(0x001021c0, "Pregnancy Status"); dict.put(0x001021d0, "Last Menstrual Date"); dict.put(0x001021f0, "Patient's Religious Preference"); dict.put(0x00104000, "Patient Comments"); dict.put(0x00120010, "Clinical Trial Sponsor Name"); dict.put(0x00120020, "Clinical Trial Protocol ID"); dict.put(0x00120021, "Clinical Trial Protocol Name"); dict.put(0x00120030, "Clinical Trial Site ID"); dict.put(0x00120031, "Clinical Trial Site Name"); dict.put(0x00120040, "Clinical Trial Subject ID"); dict.put(0x00120042, "Clinical Trial Subject Reading ID"); dict.put(0x00120050, "Clinical Trial Time Point ID"); dict.put(0x00120051, "Clinical Trial Time Point Description"); dict.put(0x00120060, "Clinical Trial Coordinating Center"); dict.put(0x00180010, "Contrast/Bolus Agent"); dict.put(0x00180012, "Contrast/Bolus Agent Sequence"); dict.put(0x00180014, "Contrast/Bolus Admin. 
Route Sequence"); dict.put(0x00180015, "Body Part Examined"); dict.put(0x00180020, "Scanning Sequence"); dict.put(0x00180021, "Sequence Variant"); dict.put(0x00180022, "Scan Options"); dict.put(0x00180023, "MR Acquisition Type"); dict.put(0x00180024, "Sequence Name"); dict.put(0x00180025, "Angio Flag"); dict.put(0x00180026, "Intervention Drug Information Sequence"); dict.put(0x00180027, "Intervention Drug Stop Time"); dict.put(0x00180028, "Intervention Drug Dose"); dict.put(0x00180029, "Intervention Drug Sequence"); dict.put(0x0018002a, "Additional Drug Sequence"); dict.put(0x00180031, "Radiopharmaceutical"); dict.put(0x00180034, "Intervention Drug Name"); dict.put(0x00180035, "Intervention Drug Start Time"); dict.put(0x00180036, "Intervention Sequence"); dict.put(0x00180038, "Intervention Status"); dict.put(0x0018003a, "Intervention Description"); dict.put(0x00180040, "Cine Rate"); dict.put(0x00180050, "Slice Thickness"); dict.put(0x00180060, "KVP"); dict.put(0x00180070, "Counts Accumulated"); dict.put(0x00180071, "Acquisition Termination Condition"); dict.put(0x00180072, "Effective Duration"); dict.put(0x00180073, "Acquisition Start Condition"); dict.put(0x00180074, "Acquisition Start Condition Data"); dict.put(0x00180075, "Acquisition Termination Condition Data"); dict.put(0x00180080, "Repetition Time"); dict.put(0x00180081, "Echo Time"); dict.put(0x00180082, "Inversion Time"); dict.put(0x00180083, "Number of Averages"); dict.put(0x00180084, "Imaging Frequency"); dict.put(0x00180085, "Imaged Nucleus"); dict.put(0x00180086, "Echo Number(s)"); dict.put(0x00180087, "Magnetic Field Strength"); dict.put(0x00180088, "Spacing Between Slices"); dict.put(0x00180089, "Number of Phase Encoding Steps"); dict.put(0x00180090, "Data Collection Diameter"); dict.put(0x00180091, "Echo Train Length"); dict.put(0x00180093, "Percent Sampling"); dict.put(0x00180094, "Percent Phase Field of View"); dict.put(0x00180095, "Pixel Bandwidth"); dict.put(0x00181000, "Device Serial Number"); dict.put(0x00181004, "Plate ID"); dict.put(0x00181010, "Secondary Capture Device ID"); dict.put(0x00181011, "Hardcopy Creation Device ID"); dict.put(0x00181012, "Date of Secondary Capture"); dict.put(0x00181014, "Time of Secondary Capture"); dict.put(0x00181016, "Secondary Capture Device Manufacturer"); dict.put(0x00181017, "Hardcopy Device Manufacturer"); dict.put(0x00181018, "Secondary Capture Device Model Name"); dict.put(0x00181019, "Secondary Capture Device Software Version"); dict.put(0x0018101a, "Hardcopy Device Software Version"); dict.put(0x0018101b, "Hardcopy Device Model Name"); dict.put(0x00181020, "Software Version(s)"); dict.put(0x00181022, "Video Image Format Acquired"); dict.put(0x00181023, "Digital Image Format Acquired"); dict.put(0x00181030, "Protocol Name"); dict.put(0x00181040, "Contrast/Bolus Route"); dict.put(0x00181041, "Contrast/Bolus Volume"); dict.put(0x00181042, "Contrast/Bolus Start Time"); dict.put(0x00181043, "Contrast/Bolus Stop Time"); dict.put(0x00181044, "Contrast/Bolus Total Dose"); dict.put(0x00181045, "Syringe Counts"); dict.put(0x00181046, "Contrast Flow Rate"); dict.put(0x00181047, "Contrast Flow Duration"); dict.put(0x00181048, "Contrast/Bolus Ingredient"); dict.put(0x00181049, "Contrast Ingredient Concentration"); dict.put(0x00181050, "Spatial Resolution"); dict.put(0x00181060, "Trigger Time"); dict.put(0x00181061, "Trigger Source or Type"); dict.put(0x00181062, "Nominal Interval"); dict.put(0x00181063, "Frame Time"); dict.put(0x00181064, "Framing Type"); dict.put(0x00181065, "Frame Time 
Vector"); dict.put(0x00181066, "Frame Delay"); dict.put(0x00181067, "Image Trigger Delay"); dict.put(0x00181068, "Multiplex Group Time Offset"); dict.put(0x00181069, "Trigger Time Offset"); dict.put(0x0018106a, "Synchronization Trigger"); dict.put(0x0018106c, "Synchronization Channel"); dict.put(0x0018106e, "Trigger Sample Position"); dict.put(0x00181070, "Radiopharmaceutical Route"); dict.put(0x00181071, "Radiopharmaceutical Volume"); dict.put(0x00181072, "Radiopharmaceutical Start Time"); dict.put(0x00181073, "Radiopharmaceutical Stop Time"); dict.put(0x00181074, "Radionuclide Total Dose"); dict.put(0x00181075, "Radionuclide Half Life"); dict.put(0x00181076, "Radionuclide Positron Fraction"); dict.put(0x00181077, "Radiopharmaceutical Specific Activity"); dict.put(0x00181080, "Beat Rejection Flag"); dict.put(0x00181081, "Low R-R Value"); dict.put(0x00181082, "High R-R Value"); dict.put(0x00181083, "Intervals Acquired"); dict.put(0x00181084, "Intervals Rejected"); dict.put(0x00181085, "PVC Rejection"); dict.put(0x00181086, "Skip Beats"); dict.put(0x00181088, "Heart Rate"); dict.put(0x00181090, "Cardiac Number of Images"); dict.put(0x00181094, "Trigger Window"); dict.put(0x00181100, "Reconstruction Diameter"); dict.put(0x00181110, "Distance Source to Detector"); dict.put(0x00181111, "Distance Source to Patient"); dict.put(0x00181114, "Estimated Radiographic Mag. Factor"); dict.put(0x00181120, "Gantry/Detector Tilt"); dict.put(0x00181121, "Gantry/Detector Skew"); dict.put(0x00181130, "Table Height"); dict.put(0x00181131, "Table Traverse"); dict.put(0x00181134, "Table Motion"); dict.put(0x00181135, "Table Vertical Increment"); dict.put(0x00181136, "Table Lateral Increment"); dict.put(0x00181137, "Table Longitudinal Increment"); dict.put(0x00181138, "Table Angle"); dict.put(0x0018113a, "Table Type"); dict.put(0x00181140, "Rotation Direction"); dict.put(0x00181141, "Angular Position"); dict.put(0x00181142, "Radial Position"); dict.put(0x00181143, "Scan Arc"); dict.put(0x00181144, "Angular Step"); dict.put(0x00181145, "Center of Rotation Offset"); dict.put(0x00181147, "Field of View Shape"); dict.put(0x00181149, "Field of View Dimension(s)"); dict.put(0x00181150, "Exposure Time"); dict.put(0x00181151, "X-ray Tube Current"); dict.put(0x00181152, "Exposure"); dict.put(0x00181153, "Exposure in uAs"); dict.put(0x00181154, "Average Pulse Width"); dict.put(0x00181155, "Radiation Setting"); dict.put(0x00181156, "Rectification Type"); dict.put(0x0018115a, "Radiation Mode"); dict.put(0x0018115e, "Image Area Dose Product"); dict.put(0x00181160, "Filter Type"); dict.put(0x00181161, "Type of Filters"); dict.put(0x00181162, "Intensifier Size"); dict.put(0x00181164, "Imager Pixel Spacing"); dict.put(0x00181166, "Grid"); dict.put(0x00181170, "Generator Power"); dict.put(0x00181180, "Collimator/Grid Name"); dict.put(0x00181181, "Collimator Type"); dict.put(0x00181182, "Focal Distance"); dict.put(0x00181183, "X Focus Center"); dict.put(0x00181184, "Y Focus Center"); dict.put(0x00181190, "Focal Spot(s)"); dict.put(0x00181191, "Anode Target Material"); dict.put(0x001811a0, "Body Part Thickness"); dict.put(0x001811a2, "Compression Force"); dict.put(0x00181200, "Date of Last Calibration"); dict.put(0x00181201, "Time of Last Calibration"); dict.put(0x00181210, "Convolution Kernel"); dict.put(0x00181242, "Actual Frame Duration"); dict.put(0x00181243, "Count Rate"); dict.put(0x00181244, "Preferred Playback Sequencing"); dict.put(0x00181250, "Receive Coil Name"); dict.put(0x00181251, "Transmit Coil Name"); 
dict.put(0x00181260, "Plate Type"); dict.put(0x00181261, "Phosphor Type"); dict.put(0x00181300, "Scan Velocity"); dict.put(0x00181301, "Whole Body Technique"); dict.put(0x00181302, "Scan Length"); dict.put(0x00181310, "Acquisition Matrix"); dict.put(0x00181312, "In-plane Phase Encoding Direction"); dict.put(0x00181314, "Flip Angle"); dict.put(0x00181315, "Variable Flip Angle Flag"); dict.put(0x00181316, "SAR"); dict.put(0x00181318, "dB/dt"); dict.put(0x00181400, "Acquisition Device Processing Descr."); dict.put(0x00181401, "Acquisition Device Processing Code"); dict.put(0x00181402, "Cassette Orientation"); dict.put(0x00181403, "Cassette Size"); dict.put(0x00181404, "Exposures on Plate"); dict.put(0x00181405, "Relative X-ray Exposure"); dict.put(0x00181450, "Column Angulation"); dict.put(0x00181460, "Tomo Layer Height"); dict.put(0x00181470, "Tomo Angle"); dict.put(0x00181480, "Tomo Time"); dict.put(0x00181490, "Tomo Type"); dict.put(0x00181491, "Tomo Class"); dict.put(0x00181495, "Number of Tomosynthesis Source Images"); dict.put(0x00181500, "Positioner Motion"); dict.put(0x00181508, "Positioner Type"); dict.put(0x00181510, "Positioner Primary Angle"); dict.put(0x00181511, "Positioner Secondary Angle"); dict.put(0x00181520, "Positioner Primary Angle Increment"); dict.put(0x00181521, "Positioner Secondary Angle Increment"); dict.put(0x00181530, "Detector Primary Angle"); dict.put(0x00181531, "Detector Secondary Angle"); dict.put(0x00181600, "Shutter Shape"); dict.put(0x00181602, "Shutter Left Vertical Edge"); dict.put(0x00181604, "Shutter Right Vertical Edge"); dict.put(0x00181606, "Shutter Upper Horizontal Edge"); dict.put(0x00181608, "Shutter Lower Horizontal Edge"); dict.put(0x00181610, "Center of Circular Shutter"); dict.put(0x00181612, "Radius of Circular Shutter"); dict.put(0x00181620, "Vertices of the Polygonal Shutter"); dict.put(0x00181622, "Shutter Presentation Value"); dict.put(0x00181623, "Shutter Overlay Group"); dict.put(0x00181700, "Collimator Shape"); dict.put(0x00181702, "Collimator Left Vertical Edge"); dict.put(0x00181704, "Collimator Right Vertical Edge"); dict.put(0x00181706, "Collimator Upper Horizontal Edge"); dict.put(0x00181708, "Collimator Lower Horizontal Edge"); dict.put(0x00181710, "Center of Circular Collimator"); dict.put(0x00181712, "Radius of Circular Collimator"); dict.put(0x00181720, "Vertices of the polygonal Collimator"); dict.put(0x00181800, "Acquisition Time Synchronized"); dict.put(0x00181801, "Time Source"); dict.put(0x00181802, "Time Distribution Protocol"); dict.put(0x00181803, "NTP Source Address"); dict.put(0x00182001, "Page Number Vector"); dict.put(0x00182002, "Frame Label Vector"); dict.put(0x00182003, "Frame Primary Angle Vector"); dict.put(0x00182004, "Frame Secondary Angle Vector"); dict.put(0x00182005, "Slice Location Vector"); dict.put(0x00182006, "Display Window Label Vector"); dict.put(0x00182010, "Nominal Scanned Pixel Spacing"); dict.put(0x00182020, "Digitizing Device Transport Direction"); dict.put(0x00182030, "Rotation of Scanned Film"); dict.put(0x00183100, "IVUS Acquisition"); dict.put(0x00183101, "IVUS Pullback Rate"); dict.put(0x00183102, "IVUS Gated Rate"); dict.put(0x00183103, "IVUS Pullback Start Frame Number"); dict.put(0x00183104, "IVUS Pullback Stop Frame Number"); dict.put(0x00183105, "Lesion Number"); dict.put(0x00185000, "Output Power"); dict.put(0x00185010, "Transducer Data"); dict.put(0x00185012, "Focus Depth"); dict.put(0x00185020, "Processing Function"); dict.put(0x00185021, "Postprocessing Fuction"); 
dict.put(0x00185022, "Mechanical Index"); dict.put(0x00185024, "Bone Thermal Index"); dict.put(0x00185026, "Cranial Thermal Index"); dict.put(0x00185027, "Soft Tissue Thermal Index"); dict.put(0x00185028, "Soft Tissue-focus Thermal Index"); dict.put(0x00185029, "Soft Tissue-surface Thermal Index"); dict.put(0x00185050, "Depth of scan field"); dict.put(0x00185100, "Patient Position"); dict.put(0x00185101, "View Position"); dict.put(0x00185104, "Projection Eponymous Name Code"); dict.put(0x00186000, "Sensitivity"); dict.put(0x00186011, "Sequence of Ultrasound Regions"); dict.put(0x00186012, "Region Spatial Format"); dict.put(0x00186014, "Region Data Type"); dict.put(0x00186016, "Region Flags"); dict.put(0x00186018, "Region Location Min X0"); dict.put(0x0018601a, "Region Location Min Y0"); dict.put(0x0018601c, "Region Location Max X1"); dict.put(0x0018601e, "Region Location Max Y1"); dict.put(0x00186020, "Reference Pixel X0"); dict.put(0x00186022, "Reference Pixel Y0"); dict.put(0x00186024, "Physical Units X Direction"); dict.put(0x00186026, "Physical Units Y Direction"); dict.put(0x00186028, "Reference Pixel Physical Value X"); dict.put(0x0018602a, "Reference Pixel Physical Value Y"); dict.put(0x0018602c, "Physical Delta X"); dict.put(0x0018602e, "Physical Delta Y"); dict.put(0x00186030, "Transducer Frequency"); dict.put(0x00186031, "Transducer Type"); dict.put(0x00186032, "Pulse Repetition Frequency"); dict.put(0x00186034, "Doppler Correction Angle"); dict.put(0x00186036, "Steering Angle"); dict.put(0x00186039, "Doppler Sample Volume X Position"); dict.put(0x0018603b, "Doppler Sample Volume Y Position"); dict.put(0x0018603d, "TM-Line Position X0"); dict.put(0x0018603f, "TM-Line Position Y0"); dict.put(0x00186041, "TM-Line Position X1"); dict.put(0x00186043, "TM-Line Position Y1"); dict.put(0x00186044, "Pixel Component Organization"); dict.put(0x00186046, "Pixel Component Mask"); dict.put(0x00186048, "Pixel Component Range Start"); dict.put(0x0018604a, "Pixel Component Range Stop"); dict.put(0x0018604c, "Pixel Component Physical Units"); dict.put(0x0018604e, "Pixel Component Data Type"); dict.put(0x00186050, "Number of Table Break Points"); dict.put(0x00186052, "Table of X Break Points"); dict.put(0x00186054, "Table of Y Break Points"); dict.put(0x00186056, "Number of Table Entries"); dict.put(0x00186058, "Table of Pixel Values"); dict.put(0x0018605a, "Table of Parameter Values"); dict.put(0x00186060, "R Wave Time Vector"); dict.put(0x00187000, "Detector Conditions Nominal Flag"); dict.put(0x00187001, "Detector Temperature"); dict.put(0x00187004, "Detector Type"); dict.put(0x00187005, "Detector Configuration"); dict.put(0x00187006, "Detector Description"); dict.put(0x00187008, "Detector Mode"); dict.put(0x0018700a, "Detector ID"); dict.put(0x0018700c, "Date of Last Detector Calibration"); dict.put(0x0018700e, "Time of Last Detector Calibration"); dict.put(0x00187012, "Detector Time Since Last Exposure"); dict.put(0x00187014, "Detector Active Time"); dict.put(0x00187016, "Detector Activation Offset"); dict.put(0x0018701a, "Detector Binning"); dict.put(0x00187020, "Detector Element Physical Size"); dict.put(0x00187022, "Detector Element Spacing"); dict.put(0x00187024, "Detector Active Shape"); dict.put(0x00187026, "Detector Active Dimension(s)"); dict.put(0x00187028, "Detector Active Origin"); dict.put(0x0018702a, "Detector Manufacturer Name"); dict.put(0x0018702b, "Detector Model Name"); dict.put(0x00187030, "Field of View Origin"); dict.put(0x00187032, "Field of View Rotation"); 
dict.put(0x00187034, "Field of View Horizontal Flip"); dict.put(0x00187040, "Grid Absorbing Material"); dict.put(0x00187041, "Grid Spacing Material"); dict.put(0x00187042, "Grid Thickness"); dict.put(0x00187044, "Grid Pitch"); dict.put(0x00187046, "Grid Aspect Ratio"); dict.put(0x00187048, "Grid Period"); dict.put(0x0018704c, "Grid Focal Distance"); dict.put(0x00187050, "Filter Material"); dict.put(0x00187052, "Filter Thickness Min"); dict.put(0x00187054, "Filter Thickness Max"); dict.put(0x00187060, "Exposure Control Mode"); dict.put(0x0020000d, "Study Instance UID"); dict.put(0x0020000e, "Series Instance UID"); dict.put(0x00200011, "Series Number"); dict.put(0x00200012, "Acquisition Number"); dict.put(0x00200013, "Instance Number"); dict.put(0x00200020, "Patient Orientation"); dict.put(0x00200030, "Image Position"); dict.put(0x00200032, "Image Position (Patient)"); dict.put(0x00200037, "Image Orientation (Patient)"); dict.put(0x00200050, "Location"); dict.put(0x00200052, "Frame of Reference UID"); dict.put(0x00200070, "Image Geometry Type"); dict.put(0x00201001, "Acquisitions in Series"); dict.put(0x00201020, "Reference"); dict.put(0x00201041, "Slice Location"); // skipped a bunch of stuff here - not used dict.put(0x00280002, "Samples per pixel"); dict.put(0x00280003, "Samples per pixel used"); dict.put(0x00280004, "Photometric Interpretation"); dict.put(0x00280006, "Planar Configuration"); dict.put(0x00280008, "Number of frames"); dict.put(0x00280009, "Frame Increment Pointer"); dict.put(0x0028000a, "Frame Dimension Pointer"); dict.put(0x00280010, "Rows"); dict.put(0x00280011, "Columns"); dict.put(0x00280012, "Planes"); dict.put(0x00280014, "Ultrasound Color Data Present"); dict.put(0x00280030, "Pixel Spacing"); dict.put(0x00280031, "Zoom Factor"); dict.put(0x00280032, "Zoom Center"); dict.put(0x00280034, "Pixel Aspect Ratio"); dict.put(0x00280051, "Corrected Image"); dict.put(0x00280100, "Bits Allocated"); dict.put(0x00280101, "Bits Stored"); dict.put(0x00280102, "High Bit"); dict.put(0x00280103, "Pixel Representation"); dict.put(0x00280106, "Smallest Image Pixel Value"); dict.put(0x00280107, "Largest Image Pixel Value"); dict.put(0x00280108, "Smallest Pixel Value in Series"); dict.put(0x00280109, "Largest Pixel Value in Series"); dict.put(0x00280110, "Smallest Image Pixel Value in Plane"); dict.put(0x00280111, "Largest Image Pixel Value in Plane"); dict.put(0x00280120, "Pixel Padding Value"); dict.put(0x00280300, "Quality Control Image"); dict.put(0x00280301, "Burned in Annotation"); dict.put(0x00281040, "Pixel Intensity Relationship"); dict.put(0x00281041, "Pixel Intensity Relationship Sign"); dict.put(0x00281050, "Window Center"); dict.put(0x00281051, "Window Width"); dict.put(0x00281052, "Rescale Intercept"); dict.put(0x00281053, "Rescale Slope"); dict.put(0x00281054, "Rescale Type"); dict.put(0x00281055, "Window Center and Width Explanation"); dict.put(0x00281090, "Recommended Viewing Mode"); dict.put(0x00281101, "Red Palette Color LUT Descriptor"); dict.put(0x00281102, "Green Palette Color LUT Descriptor"); dict.put(0x00281103, "Blue Palette Color LUT Descriptor"); dict.put(0x00281199, "Palette Color LUT UID"); dict.put(0x00281201, "Red Palette Color LUT Data"); dict.put(0x00281202, "Green Palette Color LUT Data"); dict.put(0x00281203, "Blue Palette Color LUT Data"); dict.put(0x00281221, "Segmented Red Palette Color LUT Data"); dict.put(0x00281222, "Segmented Green Palette Color LUT Data"); dict.put(0x00281223, "Segmented Blue Palette Color LUT Data"); dict.put(0x00281300, 
"Implant Present"); dict.put(0x00281350, "Partial View"); dict.put(0x00281351, "Partial View Description"); dict.put(0x00282110, "Lossy Image Compression"); dict.put(0x00282112, "Lossy Image Compression Ratio"); dict.put(0x00282114, "Lossy Image Compression Method"); dict.put(0x00283000, "Modality LUT Sequence"); dict.put(0x00283002, "LUT Descriptor"); dict.put(0x00283003, "LUT Explanation"); dict.put(0x00283004, "Modality LUT Type"); dict.put(0x00283006, "LUT Data"); dict.put(0x00283010, "VOI LUT Sequence"); dict.put(0x00283110, "Softcopy VOI LUT Sequence"); dict.put(0x00285000, "Bi-Plane Acquisition Sequence"); dict.put(0x00286010, "Representative Frame Number"); dict.put(0x00286020, "Frame Numbers of Interest (FOI)"); dict.put(0x00286022, "Frame(s) of Interest Description"); dict.put(0x00286023, "Frame of Interest Type"); dict.put(0x00286040, "R Wave Pointer"); dict.put(0x00286100, "Mask Subtraction Sequence"); dict.put(0x00286101, "Mask Operation"); dict.put(0x00286102, "Applicable Frame Range"); dict.put(0x00286110, "Mask Frame Numbers"); dict.put(0x00286112, "Contrast Frame Averaging"); dict.put(0x00286114, "Mask Sub-pixel Shift"); dict.put(0x00286120, "TID Offset"); dict.put(0x00286190, "Mask Operation Explanation"); dict.put(0x00289001, "Data Point Rows"); dict.put(0x00289002, "Data Point Columns"); dict.put(0x00289003, "Signal Domain Columns"); dict.put(0x00289108, "Data Representation"); dict.put(0x00289110, "Pixel Measures Sequence"); dict.put(0x00289132, "Frame VOI LUT Sequence"); dict.put(0x00289145, "Pixel Value Transformation Sequence"); dict.put(0x00289235, "Signal Domain Rows"); // skipping some more stuff dict.put(0x00540011, "Number of Energy Windows"); dict.put(0x00540021, "Number of Detectors"); dict.put(0x00540051, "Number of Rotations"); dict.put(0x00540080, "Slice Vector"); dict.put(0x00540081, "Number of Slices"); dict.put(0x00540202, "Type of Detector Motion"); dict.put(0x00540400, "Image ID"); dict.put(0x20100100, "Border Density"); return dict; } // -- Nested Classes -- public static class Metadata extends AbstractMetadata implements HasColorTable { // -- Fields -- byte[][] lut = null; short[][] shortLut = null; private ColorTable8 lut8; private ColorTable16 lut16; private long[] offsets = null; private boolean isJP2K = false; private boolean isJPEG = false; private boolean isRLE = false; private boolean isDeflate = false; private boolean oddLocations = false; private int maxPixelValue; private int imagesPerFile = 0; private double rescaleSlope = 1.0, rescaleIntercept = 0.0; private Hashtable<Integer, Vector<String>> fileList; private boolean inverted = false; private String pixelSizeX, pixelSizeY; private Double pixelSizeZ; private String date, time, imageType; private String originalDate, originalTime, originalInstance; private int originalSeries; private Vector<String> companionFiles = new Vector<String>(); // Getters and Setters public long[] getOffsets() { return offsets; } public void setOffsets(final long[] offsets) { this.offsets = offsets; } public double getRescaleSlope() { return rescaleSlope; } public void setRescaleSlope(final double rescaleSlope) { this.rescaleSlope = rescaleSlope; } public double getRescaleIntercept() { return rescaleIntercept; } public void setRescaleIntercept(final double rescaleIntercept) { this.rescaleIntercept = rescaleIntercept; } public String getPixelSizeX() { return pixelSizeX; } public void setPixelSizeX(final String pixelSizeX) { this.pixelSizeX = pixelSizeX; } public String getPixelSizeY() { return pixelSizeY; } public 
void setPixelSizeY(final String pixelSizeY) { this.pixelSizeY = pixelSizeY; } public Double getPixelSizeZ() { return pixelSizeZ; } public void setPixelSizeZ(final Double pixelSizeZ) { this.pixelSizeZ = pixelSizeZ; } public boolean isInverted() { return inverted; } public void setInverted(final boolean inverted) { this.inverted = inverted; } public boolean isJP2K() { return isJP2K; } public void setJP2K(final boolean isJP2K) { this.isJP2K = isJP2K; } public boolean isJPEG() { return isJPEG; } public void setJPEG(final boolean isJPEG) { this.isJPEG = isJPEG; } public boolean isRLE() { return isRLE; } public void setRLE(final boolean isRLE) { this.isRLE = isRLE; } public boolean isDeflate() { return isDeflate; } public void setDeflate(final boolean isDeflate) { this.isDeflate = isDeflate; } public boolean isOddLocations() { return oddLocations; } public void setOddLocations(final boolean oddLocations) { this.oddLocations = oddLocations; } public int getMaxPixelValue() { return maxPixelValue; } public void setMaxPixelValue(final int maxPixelValue) { this.maxPixelValue = maxPixelValue; } public int getImagesPerFile() { return imagesPerFile; } public void setImagesPerFile(final int imagesPerFile) { this.imagesPerFile = imagesPerFile; } public Hashtable<Integer, Vector<String>> getFileList() { return fileList; } public void setFileList(final Hashtable<Integer, Vector<String>> fileList) { this.fileList = fileList; } public String getDate() { return date; } public void setDate(final String date) { this.date = date; } public String getTime() { return time; } public void setTime(final String time) { this.time = time; } public String getImageType() { return imageType; } public void setImageType(final String imageType) { this.imageType = imageType; } public String getOriginalDate() { return originalDate; } public void setOriginalDate(final String originalDate) { this.originalDate = originalDate; } public String getOriginalTime() { return originalTime; } public void setOriginalTime(final String originalTime) { this.originalTime = originalTime; } public String getOriginalInstance() { return originalInstance; } public void setOriginalInstance(final String originalInstance) { this.originalInstance = originalInstance; } public int getOriginalSeries() { return originalSeries; } public void setOriginalSeries(final int originalSeries) { this.originalSeries = originalSeries; } public Vector<String> getCompanionFiles() { return companionFiles; } public void setCompanionFiles(final Vector<String> companionFiles) { this.companionFiles = companionFiles; } // -- ColorTable API Methods -- @Override public ColorTable getColorTable(final int imageIndex, final long planeIndex) { final int pixelType = get(0).getPixelType(); switch (pixelType) { case FormatTools.INT8: case FormatTools.UINT8: if (lut != null && lut8 == null) lut8 = new ColorTable8(lut); return lut8; case FormatTools.INT16: case FormatTools.UINT16: if (shortLut != null && lut16 == null) lut16 = new ColorTable16(shortLut); return lut16; } return null; } // -- Metadata API Methods -- @Override public void populateImageMetadata() { log().info("Populating metadata"); // TODO this isn't going to work because each parsing will // get the same filelist size and repeat infinitely final int seriesCount = fileList.size(); final Integer[] keys = fileList.keySet().toArray(new Integer[0]); Arrays.sort(keys); for (int i = 0; i < seriesCount; i++) { get(i).setAxisTypes(Axes.X, Axes.Y); int sizeZ = 0; if (seriesCount == 1) { sizeZ = getOffsets().length * 
fileList.get(keys[i]).size(); get(i).setMetadataComplete(true); get(i).setFalseColor(false); if (isRLE) { get(i).setAxisTypes(Axes.X, Axes.Y, Axes.CHANNEL); } if (get(i).getAxisLength(Axes.CHANNEL) > 1) { get(i).setPlanarAxisCount(3); } else { get(i).setPlanarAxisCount(2); } } else { try { final Parser p = (Parser) getFormat().createParser(); final Metadata m = p.parse(fileList.get(keys[i]).get(0), new SCIFIOConfig() .groupableSetGroupFiles(false)); add(m.get(0)); sizeZ *= fileList.get(keys[i]).size(); } catch (final IOException e) { log().error("Error creating Metadata from DICOM companion files.", e); } catch (final FormatException e) { log().error("Error creating Metadata from DICOM companion files.", e); } } get(i).setAxisLength(Axes.Z, sizeZ); } } // -- HasSource API Methods -- @Override public void close(final boolean fileOnly) throws IOException { super.close(fileOnly); if (!fileOnly) { oddLocations = false; isJPEG = isJP2K = isRLE = isDeflate = false; lut = null; offsets = null; shortLut = null; maxPixelValue = 0; rescaleSlope = 1.0; rescaleIntercept = 0.0; pixelSizeX = pixelSizeY = null; pixelSizeZ = null; imagesPerFile = 0; fileList = null; inverted = false; date = time = imageType = null; originalDate = originalTime = originalInstance = null; originalSeries = 0; // TODO the resetting is a bit too aggressive, perhaps it should just // clear out fields.. // companionFiles.clear(); } } } public static class Checker extends AbstractChecker { // -- Constants -- private static final String[] DICOM_SUFFIXES = { "dic", "dcm", "dicom", "j2ki", "j2kr" }; // -- Checker API Methods -- @Override public boolean suffixNecessary() { return false; } @Override public boolean suffixSufficient() { return false; } @Override public boolean isFormat(final String name, final SCIFIOConfig config) { // extension is sufficient as long as it is DIC, DCM, DICOM, J2KI, or J2KR if (FormatTools.checkSuffix(name, DICOM_SUFFIXES)) return true; return super.isFormat(name, config); } @Override public boolean isFormat(final RandomAccessInputStream stream) throws IOException { final int blockLen = 2048; if (!FormatTools.validStream(stream, blockLen, true)) return false; stream.seek(128); if (stream.readString(4).equals(DICOM_MAGIC_STRING)) return true; stream.seek(0); try { final int tag = DICOMUtils.getNextTag(stream).get(); return TYPES.get(tag) != null; } catch (final NullPointerException e) {} catch (final FormatException e) {} return false; } } public static class Parser extends AbstractParser<Metadata> { // -- Constants -- private static final int PIXEL_REPRESENTATION = 0x00280103; private static final int PIXEL_SIGN = 0x00281041; private static final int TRANSFER_SYNTAX_UID = 0x00020010; private static final int SLICE_SPACING = 0x00180088; private static final int SAMPLES_PER_PIXEL = 0x00280002; private static final int PHOTOMETRIC_INTERPRETATION = 0x00280004; private static final int PLANAR_CONFIGURATION = 0x00280006; private static final int NUMBER_OF_FRAMES = 0x00280008; private static final int ROWS = 0x00280010; private static final int COLUMNS = 0x00280011; private static final int PIXEL_SPACING = 0x00280030; private static final int BITS_ALLOCATED = 0x00280100; private static final int WINDOW_CENTER = 0x00281050; private static final int WINDOW_WIDTH = 0x00281051; private static final int RESCALE_INTERCEPT = 0x00281052; private static final int RESCALE_SLOPE = 0x00281053; private static final int ICON_IMAGE_SEQUENCE = 0x00880200; private static final int ITEM = 0xFFFEE000; private static final int 
ITEM_DELIMINATION = 0xFFFEE00D; private static final int SEQUENCE_DELIMINATION = 0xFFFEE0DD; private static final int PIXEL_DATA = 0x7FE00010; @Parameter private CodecService codecService; // -- Parser API Methods -- @Override public int fileGroupOption(final String id) throws FormatException, IOException { return FormatTools.CAN_GROUP; } @Override protected void typedParse(final RandomAccessInputStream stream, final Metadata meta, final SCIFIOConfig config) throws IOException, FormatException { meta.createImageMetadata(1); stream.order(true); final ImageMetadata iMeta = meta.get(0); // look for companion files final Vector<String> companionFiles = new Vector<String>(); attachCompanionFiles(companionFiles); meta.setCompanionFiles(companionFiles); int location = 0; boolean isJP2K = false; boolean isJPEG = false; boolean isRLE = false; boolean isDeflate = false; boolean oddLocations = false; int maxPixelValue = -1; int imagesPerFile = 0; boolean bigEndianTransferSyntax = false; long[] offsets = null; int sizeX = 0; int sizeY = 0; int bitsPerPixel = 0; boolean interleaved; // some DICOM files have a 128 byte header followed by a 4 byte identifier log().info("Verifying DICOM format"); final MetadataLevel level = config.parserGetLevel(); getSource().seek(128); if (getSource().readString(4).equals("DICM")) { if (level != MetadataLevel.MINIMUM) { // header exists, so we'll read it getSource().seek(0); meta.getTable() .put("Header information", getSource().readString(128)); getSource().skipBytes(4); } location = 128; } else getSource().seek(0); log().info("Reading tags"); long baseOffset = 0; boolean decodingTags = true; boolean signed = false; while (decodingTags) { if (getSource().getFilePointer() + 4 >= getSource().length()) { break; } log().debug("Reading tag from " + getSource().getFilePointer()); final DICOMTag tag = DICOMUtils.getNextTag(getSource(), bigEndianTransferSyntax, oddLocations); iMeta.setLittleEndian(tag.isLittleEndian()); if (tag.getElementLength() <= 0) continue; oddLocations = (location & 1) != 0; log().debug( " tag=" + tag.get() + " len=" + tag.getElementLength() + " fp=" + getSource().getFilePointer()); String s = null; switch (tag.get()) { case TRANSFER_SYNTAX_UID: // this tag can indicate which compression scheme is used s = getSource().readString(tag.getElementLength()); addInfo(meta, tag, s); if (s.startsWith("1.2.840.10008.1.2.4.9")) isJP2K = true; else if (s.startsWith("1.2.840.10008.1.2.4")) isJPEG = true; else if (s.startsWith("1.2.840.10008.1.2.5")) isRLE = true; else if (s.equals("1.2.8.10008.1.2.1.99")) isDeflate = true; else if (s.contains("1.2.4") || s.contains("1.2.5")) { throw new UnsupportedCompressionException( "Sorry, compression type " + s + " not supported"); } if (s.contains("1.2.840.10008.1.2.2")) { bigEndianTransferSyntax = true; } break; case NUMBER_OF_FRAMES: s = getSource().readString(tag.getElementLength()); addInfo(meta, tag, s); final double frames = Double.parseDouble(s); if (frames > 1.0) imagesPerFile = (int) frames; break; case SAMPLES_PER_PIXEL: addInfo(meta, tag, getSource().readShort()); break; case PLANAR_CONFIGURATION: final int configuration = getSource().readShort(); interleaved = configuration == 0; if (interleaved) { iMeta.setAxisTypes(Axes.CHANNEL, Axes.X, Axes.Y); iMeta.setPlanarAxisCount(3); } addInfo(meta, tag, configuration); break; case ROWS: if (sizeY == 0) { sizeY = getSource().readShort(); iMeta.addAxis(Axes.Y, sizeY); } else getSource().skipBytes(2); addInfo(meta, tag, sizeY); break; case COLUMNS: if (sizeX == 0) { sizeX = 
getSource().readShort(); iMeta.addAxis(Axes.X, sizeX); } else getSource().skipBytes(2); addInfo(meta, tag, sizeX); break; case PHOTOMETRIC_INTERPRETATION: case PIXEL_SPACING: case SLICE_SPACING: case RESCALE_INTERCEPT: case WINDOW_CENTER: case RESCALE_SLOPE: addInfo(meta, tag, getSource().readString(tag.getElementLength())); break; case BITS_ALLOCATED: if (bitsPerPixel == 0) bitsPerPixel = getSource().readShort(); else getSource().skipBytes(2); addInfo(meta, tag, bitsPerPixel); break; case PIXEL_REPRESENTATION: case PIXEL_SIGN: final short ss = getSource().readShort(); signed = ss == 1; addInfo(meta, tag, ss); break; case 537262910: case WINDOW_WIDTH: final String t = getSource().readString(tag.getElementLength()); if (t.trim().length() == 0) maxPixelValue = -1; else { try { maxPixelValue = new Double(t.trim()).intValue(); } catch (final NumberFormatException e) { maxPixelValue = -1; } } addInfo(meta, tag, t); break; case PIXEL_DATA: case ITEM: case 0xffee000: if (tag.getElementLength() != 0) { baseOffset = getSource().getFilePointer(); addInfo(meta, tag, location); decodingTags = false; } else addInfo(meta, tag, null); break; case 0x7f880010: if (tag.getElementLength() != 0) { baseOffset = location + 4; decodingTags = false; } break; case 0x7fe00000: getSource().skipBytes(tag.getElementLength()); break; case 0: getSource().seek(getSource().getFilePointer() - 4); break; default: final long oldfp = getSource().getFilePointer(); addInfo(meta, tag, s); getSource().seek(oldfp + tag.getElementLength()); } if (getSource().getFilePointer() >= (getSource().length() - 4)) { decodingTags = false; } } if (imagesPerFile == 0) imagesPerFile = 1; int bpp = bitsPerPixel; while (bitsPerPixel % 8 != 0) bitsPerPixel++; if (bitsPerPixel == 24 || bitsPerPixel == 48) { bitsPerPixel /= 3; bpp /= 3; } final int pixelType = FormatTools.pixelTypeFromBytes(bitsPerPixel / 8, signed, false); iMeta.setBitsPerPixel(bpp); iMeta.setPixelType(pixelType); final int bytesPerPixel = FormatTools.getBytesPerPixel(pixelType); final int planeSize = sizeX * sizeY * (int) (meta.getColorTable(0, 0) == null ? meta.get(0).getAxisLength( Axes.CHANNEL) : 1) * bytesPerPixel; meta.setJP2K(isJP2K); meta.setJPEG(isJPEG); meta.setImagesPerFile(imagesPerFile); meta.setRLE(isRLE); meta.setDeflate(isDeflate); meta.setMaxPixelValue(maxPixelValue); meta.setOddLocations(oddLocations); log().info("Calculating image offsets"); // calculate the offset to each plane getSource().seek(baseOffset - 12); final int len = getSource().readInt(); if (len >= 0 && len + getSource().getFilePointer() < getSource().length()) { getSource().skipBytes(len); final int check = getSource().readShort() & 0xffff; if (check == 0xfffe) { baseOffset = getSource().getFilePointer() + 2; } } offsets = new long[imagesPerFile]; meta.setOffsets(offsets); for (int i = 0; i < imagesPerFile; i++) { if (isRLE) { if (i == 0) getSource().seek(baseOffset); else { getSource().seek(offsets[i - 1]); final CodecOptions options = new CodecOptions(); options.maxBytes = planeSize / bytesPerPixel; for (int q = 0; q < bytesPerPixel; q++) { final PackbitsCodec codec = codecService.getCodec(PackbitsCodec.class); codec.decompress(getSource(), options); while (getSource().read() == 0) { /* Read to non-0 data */} getSource().seek(getSource().getFilePointer() - 1); } } getSource().skipBytes(i == 0 ? 
64 : 53); while (getSource().read() == 0) { /* Read to non-0 data */} offsets[i] = getSource().getFilePointer() - 1; } else if (isJPEG || isJP2K) { // scan for next JPEG magic byte sequence if (i == 0) offsets[i] = baseOffset; else offsets[i] = offsets[i - 1] + 3; final byte secondCheck = isJPEG ? (byte) 0xd8 : (byte) 0x4f; getSource().seek(offsets[i]); final byte[] buf = new byte[8192]; int n = getSource().read(buf); boolean found = false; while (!found) { for (int q = 0; q < n - 2; q++) { if (buf[q] == (byte) 0xff && buf[q + 1] == secondCheck && buf[q + 2] == (byte) 0xff) { if (isJPEG || (isJP2K && buf[q + 3] == 0x51)) { found = true; offsets[i] = getSource().getFilePointer() + q - n; break; } } } if (!found) { for (int q = 0; q < 4; q++) { buf[q] = buf[buf.length + q - 4]; } n = getSource().read(buf, 4, buf.length - 4) + 4; } } } else offsets[i] = baseOffset + planeSize * i; } makeFileList(config); } @Override public String[] getImageUsedFiles(final int imageIndex, final boolean noPixels) { FormatTools.assertId(getSource(), true, 1); if (noPixels || getMetadata().getFileList() == null) return null; final Integer[] keys = getMetadata().getFileList().keySet().toArray(new Integer[0]); Arrays.sort(keys); final Vector<String> files = getMetadata().getFileList().get(keys[imageIndex]); for (final String f : getMetadata().getCompanionFiles()) { files.add(f); } return files == null ? null : files.toArray(new String[files.size()]); } // -- Helper methods -- private void makeFileList(final SCIFIOConfig config) throws FormatException, IOException { log().info("Building file list"); if (getMetadata().getFileList() == null && getMetadata().getOriginalInstance() != null && getMetadata().getOriginalDate() != null && getMetadata().getOriginalTime() != null && config.groupableIsGroupFiles()) { final Hashtable<Integer, Vector<String>> fileList = new Hashtable<Integer, Vector<String>>(); final Integer s = new Integer(getMetadata().getOriginalSeries()); fileList.put(s, new Vector<String>()); final int instanceNumber = Integer.parseInt(getMetadata().getOriginalInstance()) - 1; if (instanceNumber == 0) fileList.get(s).add(getSource().getFileName()); else { while (instanceNumber > fileList.get(s).size()) { fileList.get(s).add(null); } fileList.get(s).add(getSource().getFileName()); } // look for matching files in the current directory final Location currentFile = new Location(getContext(), getSource().getFileName()) .getAbsoluteFile(); Location directory = currentFile.getParentFile(); scanDirectory(directory, false); // move up a directory and look for other directories that // could contain matching files directory = directory.getParentFile(); final String[] subdirs = directory.list(true); if (subdirs != null) { for (final String subdir : subdirs) { final Location f = new Location(getContext(), directory, subdir).getAbsoluteFile(); if (!f.isDirectory()) continue; scanDirectory(f, true); } } final Integer[] keys = fileList.keySet().toArray(new Integer[0]); Arrays.sort(keys); for (final Integer key : keys) { for (int j = 0; j < fileList.get(key).size(); j++) { if (fileList.get(key).get(j) == null) { fileList.get(key).remove(j); j--; } } } getMetadata().setFileList(fileList); } else if (getMetadata().getFileList() == null) { final Hashtable<Integer, Vector<String>> fileList = new Hashtable<Integer, Vector<String>>(); fileList.put(0, new Vector<String>()); fileList.get(0).add(getSource().getFileName()); getMetadata().setFileList(fileList); } } /** * DICOM datasets produced by: * 
http://www.ct-imaging.de/index.php/en/ct-systeme-e/mikro-ct-e.html * contain a bunch of extra metadata and log files. We do not parse these * extra files, but do locate and attach them to the DICOM file(s). */ private void attachCompanionFiles(final Vector<String> companionFiles) { final Location parent = new Location(getContext(), getSource().getFileName()).getAbsoluteFile() .getParentFile(); final Location grandparent = parent.getParentFile(); if (new Location(getContext(), grandparent, parent.getName() + ".mif") .exists()) { final String[] list = grandparent.list(true); for (final String f : list) { final Location file = new Location(getContext(), grandparent, f); if (!file.isDirectory()) { companionFiles.add(file.getAbsolutePath()); } } } } /** * Scan the given directory for files that belong to this dataset. */ private void scanDirectory(final Location dir, final boolean checkSeries) throws FormatException, IOException { final Location currentFile = new Location(getContext(), getSource().getFileName()).getAbsoluteFile(); final FilePattern pattern = new FilePattern(getContext(), currentFile.getName(), dir .getAbsolutePath()); String[] patternFiles = pattern.getFiles(); if (patternFiles == null) patternFiles = new String[0]; Arrays.sort(patternFiles); final String[] files = dir.list(true); if (files == null) return; Arrays.sort(files); for (final String f : files) { final String file = new Location(getContext(), dir, f).getAbsolutePath(); log().debug("Checking file " + file); if (!f.equals(getSource().getFileName()) && !file.equals(getSource().getFileName()) && getFormat().createChecker().isFormat(file) && Arrays.binarySearch(patternFiles, file) >= 0) { addFileToList(file, checkSeries); } } } /** * Determine if the given file belongs in the same dataset as this file. 
*/ private void addFileToList(final String file, final boolean checkSeries) throws FormatException, IOException { final RandomAccessInputStream stream = new RandomAccessInputStream(getContext(), file); if (!getFormat().createChecker().isFormat(stream)) { stream.close(); return; } stream.order(true); stream.seek(128); if (!stream.readString(4).equals("DICM")) stream.seek(0); int fileSeries = -1; String date = null, time = null, instance = null; while (date == null || time == null || instance == null || (checkSeries && fileSeries < 0)) { final long fp = stream.getFilePointer(); if (fp + 4 >= stream.length() || fp < 0) break; final DICOMTag tag = DICOMUtils.getNextTag(stream); final String key = TYPES.get(new Integer(tag.get())); if ("Instance Number".equals(key)) { instance = stream.readString(tag.getElementLength()).trim(); if (instance.length() == 0) instance = null; } else if ("Acquisition Time".equals(key)) { time = stream.readString(tag.getElementLength()); } else if ("Acquisition Date".equals(key)) { date = stream.readString(tag.getElementLength()); } else if ("Series Number".equals(key)) { fileSeries = Integer.parseInt(stream.readString(tag.getElementLength()).trim()); } else stream.skipBytes(tag.getElementLength()); } stream.close(); if (date == null || time == null || instance == null || (checkSeries && fileSeries == getMetadata().getOriginalSeries())) { return; } int stamp = 0; try { stamp = Integer.parseInt(time); } catch (final NumberFormatException e) {} int timestamp = 0; try { timestamp = Integer.parseInt(getMetadata().getOriginalTime()); } catch (final NumberFormatException e) {} if (date.equals(getMetadata().getOriginalDate()) && (Math.abs(stamp - timestamp) < 150)) { int position = Integer.parseInt(instance) - 1; if (position < 0) position = 0; final Hashtable<Integer, Vector<String>> fileList = getMetadata().getFileList(); if (fileList.get(fileSeries) == null) { fileList.put(fileSeries, new Vector<String>()); } if (position < fileList.get(fileSeries).size()) { while (position < fileList.get(fileSeries).size() && fileList.get(fileSeries).get(position) != null) { position++; } if (position < fileList.get(fileSeries).size()) { fileList.get(fileSeries).setElementAt(file, position); } else fileList.get(fileSeries).add(file); } else { while (position > fileList.get(fileSeries).size()) { fileList.get(fileSeries).add(null); } fileList.get(fileSeries).add(file); } } } private void addInfo(final Metadata meta, final DICOMTag tag, final String value) throws IOException { final String oldValue = value; String info = getHeaderInfo(tag, value); if (info != null && tag.get() != ITEM) { info = info.trim(); if (info.equals("")) info = oldValue == null ? 
"" : oldValue.trim(); String key = TYPES.get(tag.get()); if (key == null) { key = formatTag(tag.get()); } if (key.equals("Samples per pixel")) { final int sizeC = Integer.parseInt(info); if (sizeC > 1) { meta.get(0).setAxisLength(Axes.CHANNEL, sizeC); meta.get(0).setPlanarAxisCount(2); } } else if (key.equals("Photometric Interpretation")) { if (info.equals("PALETTE COLOR")) { meta.get(0).setIndexed(true); meta.get(0).setAxisLength(Axes.CHANNEL, 1); meta.lut = new byte[3][]; meta.shortLut = new short[3][]; } else if (info.startsWith("MONOCHROME")) { meta.setInverted(info.endsWith("1")); } } else if (key.equals("Acquisition Date")) meta.setOriginalDate(info); else if (key.equals("Acquisition Time")) meta.setOriginalTime(info); else if (key.equals("Instance Number")) { if (info.trim().length() > 0) { meta.setOriginalInstance(info); } } else if (key.equals("Series Number")) { try { meta.setOriginalSeries(Integer.parseInt(info)); } catch (final NumberFormatException e) {} } else if (key.contains("Palette Color LUT Data")) { final String color = key.substring(0, key.indexOf(" ")).trim(); final int ndx = color.equals("Red") ? 0 : color.equals("Green") ? 1 : 2; final long fp = getSource().getFilePointer(); getSource().seek( getSource().getFilePointer() - tag.getElementLength() + 1); meta.shortLut[ndx] = new short[tag.getElementLength() / 2]; meta.lut[ndx] = new byte[tag.getElementLength() / 2]; for (int i = 0; i < meta.lut[ndx].length; i++) { meta.shortLut[ndx][i] = getSource().readShort(); meta.lut[ndx][i] = (byte) (meta.shortLut[ndx][i] & 0xff); } getSource().seek(fp); } else if (key.equals("Content Time")) meta.setTime(info); else if (key.equals("Content Date")) meta.setDate(info); else if (key.equals("Image Type")) meta.setImageType(info); else if (key.equals("Rescale Intercept")) { meta.setRescaleIntercept(Double.parseDouble(info)); } else if (key.equals("Rescale Slope")) { meta.setRescaleSlope(Double.parseDouble(info)); } else if (key.equals("Pixel Spacing")) { meta.setPixelSizeX(info.substring(0, info.indexOf("\\"))); meta.setPixelSizeY(info.substring(info.lastIndexOf("\\") + 1)); } else if (key.equals("Spacing Between Slices")) { meta.setPixelSizeZ(new Double(info)); } if (((tag.get() & 0xffff0000) >> 16) != 0x7fe0) { key = formatTag(tag.get()) + " " + key; final int imageIndex = meta.getImageCount() - 1; Object v; if ((v = meta.get(imageIndex).getTable().get(key)) != null) { // make sure that values are not overwritten meta.get(imageIndex).getTable().remove(key); meta.get(imageIndex).getTable().putList(key, v); meta.get(imageIndex).getTable().putList(key, info); } else { meta.get(imageIndex).getTable().put(key, info); } } } } private String formatTag(final int tag) { String s = Integer.toHexString(tag); while (s.length() < 8) { s = "0" + s; } return s.substring(0, 4) + "," + s.substring(4); } private void addInfo(final Metadata meta, final DICOMTag tag, final int value) throws IOException { addInfo(meta, tag, Integer.toString(value)); } private String getHeaderInfo(final DICOMTag tag, String value) throws IOException { if (tag.get() == ITEM_DELIMINATION || tag.get() == SEQUENCE_DELIMINATION) { tag.setInSequence(false); } String id = TYPES.get(new Integer(tag.get())); int vr = tag.getVR(); if (id != null) { if (vr == DICOMUtils.IMPLICIT_VR) { vr = (id.charAt(0) << 8) + id.charAt(1); tag.setVR(vr); } if (id.length() > 2) id = id.substring(2); } if (tag.get() == ITEM) return id != null ? 
id : null; if (value != null) return value; boolean skip = false; switch (vr) { case DICOMUtils.AE: case DICOMUtils.AS: case DICOMUtils.AT: // Cannot fix element length to 4, because AT value representation is // always // 4 bytes long (DICOM specs PS3.5 §6.2), but value multiplicity is // 1-n byte[] bytes = new byte[tag.getElementLength()]; // Read from stream getSource().readFully(bytes); // If little endian, swap bytes to get a string with a user friendly // representation of tag group and tag element if (tag.littleEndian) { for (int i = 0; i < bytes.length / 2; ++i) { byte t = bytes[2 * i]; bytes[2 * i] = bytes[2 * i + 1]; bytes[2 * i + 1] = t; } } // Convert the bytes to a string value = DataTools.bytesToHex(bytes); break; case DICOMUtils.CS: case DICOMUtils.DA: case DICOMUtils.DS: case DICOMUtils.DT: case DICOMUtils.IS: case DICOMUtils.LO: case DICOMUtils.LT: case DICOMUtils.PN: case DICOMUtils.SH: case DICOMUtils.ST: case DICOMUtils.TM: case DICOMUtils.UI: value = getSource().readString(tag.getElementLength()); break; case DICOMUtils.US: if (tag.getElementLength() == 2) value = Integer.toString(getSource().readShort()); else { value = ""; final int n = tag.getElementLength() / 2; for (int i = 0; i < n; i++) { value += Integer.toString(getSource().readShort()) + " "; } } break; case DICOMUtils.IMPLICIT_VR: value = getSource().readString(tag.getElementLength()); if (tag.getElementLength() <= 4 || tag.getElementLength() > 44) value = null; break; case DICOMUtils.SQ: value = ""; final boolean privateTag = ((tag.getElementLength() >> 16) & 1) != 0; if (tag.get() == ICON_IMAGE_SEQUENCE || privateTag) skip = true; break; default: skip = true; } if (skip) { final long skipCount = tag.getElementLength(); if (getSource().getFilePointer() + skipCount <= getSource().length()) { getSource().skipBytes((int) skipCount); } tag.addLocation(tag.getElementLength()); value = ""; } if (value != null && id == null && !value.equals("")) return value; else if (id == null) return null; else return value; } } public static class Reader extends ByteArrayReader<Metadata> { @Parameter private InitializeService initializeService; @Parameter private CodecService codecService; // -- AbstractReader API Methods -- @Override protected String[] createDomainArray() { return new String[] { FormatTools.MEDICAL_DOMAIN }; } // -- Reader API Methods -- @Override public boolean hasCompanionFiles() { return true; } @Override public ByteArrayPlane openPlane(final int imageIndex, long planeIndex, final ByteArrayPlane plane, final long[] planeMin, final long[] planeMax, final SCIFIOConfig config) throws FormatException, IOException { final Metadata meta = getMetadata(); plane.setColorTable(meta.getColorTable(imageIndex, planeIndex)); FormatTools.checkPlaneForReading(meta, imageIndex, planeIndex, plane .getData().length, planeMin, planeMax); final int xAxis = meta.get(imageIndex).getAxisIndex(Axes.X); final int yAxis = meta.get(imageIndex).getAxisIndex(Axes.Y); final int x = (int) planeMin[xAxis], y = (int) planeMin[yAxis], w = (int) planeMax[xAxis], h = (int) planeMax[yAxis]; final Hashtable<Integer, Vector<String>> fileList = meta.getFileList(); final Integer[] keys = fileList.keySet().toArray(new Integer[0]); Arrays.sort(keys); if (fileList.get(keys[imageIndex]).size() > 1) { final int fileNumber = (int) (planeIndex / meta.getImagesPerFile()); planeIndex = planeIndex % meta.getImagesPerFile(); final String file = fileList.get(keys[imageIndex]).get(fileNumber); final io.scif.Reader r = initializeService.initializeReader(file); 
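			// Delegate to the reader initialized above for the file that actually contains
			// the requested plane; planeIndex was remapped to that file's local index.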
return (ByteArrayPlane) r.openPlane(imageIndex, planeIndex, plane, planeMin, planeMax, config); } final int ec = meta.get(0).isIndexed() ? 1 : (int) meta.get(imageIndex).getAxisLength( Axes.CHANNEL); final int bpp = FormatTools.getBytesPerPixel(meta.get(imageIndex).getPixelType()); final int bytes = (int) (meta.get(imageIndex).getAxisLength(Axes.X) * meta.get(imageIndex).getAxisLength(Axes.Y) * bpp * ec); getStream().seek(meta.getOffsets()[(int) planeIndex]); if (meta.isRLE()) { // plane is compressed using run-length encoding final CodecOptions options = new CodecOptions(); options.maxBytes = (int) (meta.get(imageIndex).getAxisLength(Axes.X) * meta.get( imageIndex).getAxisLength(Axes.Y)); final PackbitsCodec codec = codecService.getCodec(PackbitsCodec.class); for (int c = 0; c < ec; c++) { byte[] t = null; if (bpp > 1) { // TODO unused int planeSize = bytes / (bpp * ec); final byte[][] tmp = new byte[bpp][]; for (int i = 0; i < bpp; i++) { tmp[i] = codec.decompress(getStream(), options); if (planeIndex < meta.getImagesPerFile() - 1 || i < bpp - 1) { while (getStream().read() == 0) { /* Read to non-0 data */} getStream().seek(getStream().getFilePointer() - 1); } } t = new byte[bytes / ec]; for (int i = 0; i < planeIndex; i++) { for (int j = 0; j < bpp; j++) { final int byteIndex = meta.get(imageIndex).isLittleEndian() ? bpp - j - 1 : j; if (i < tmp[byteIndex].length) { t[i * bpp + j] = tmp[byteIndex][i]; } } } } else { t = codec.decompress(getStream(), options); if (t.length < (bytes / ec)) { final byte[] tmp = t; t = new byte[bytes / ec]; System.arraycopy(tmp, 0, t, 0, tmp.length); } if (planeIndex < meta.getImagesPerFile() - 1 || c < ec - 1) { while (getStream().read() == 0) { /* Read to non-0 data */} getStream().seek(getStream().getFilePointer() - 1); } } final int rowLen = w * bpp; final int srcRowLen = (int) meta.get(imageIndex).getAxisLength(Axes.X) * bpp; // TODO unused int srcPlane = meta.getAxisLength(imageIndex, Axes.Y) * // srcRowLen; for (int row = 0; row < h; row++) { final int src = (row + y) * srcRowLen + x * bpp; final int dest = (h * c + row) * rowLen; final int len = Math.min(rowLen, t.length - src - 1); if (len < 0) break; System.arraycopy(t, src, plane.getBytes(), dest, len); } } } else if (meta.isJPEG() || meta.isJP2K()) { // plane is compressed using JPEG or JPEG-2000 final long end = planeIndex < meta.getOffsets().length - 1 ? meta.getOffsets()[(int) planeIndex + 1] : getStream().length(); byte[] b = new byte[(int) (end - getStream().getFilePointer())]; getStream().read(b); if (b[2] != (byte) 0xff) { final byte[] tmp = new byte[b.length + 1]; tmp[0] = b[0]; tmp[1] = b[1]; tmp[2] = (byte) 0xff; System.arraycopy(b, 2, tmp, 3, b.length - 2); b = tmp; } if ((b[3] & 0xff) >= 0xf0) { b[3] -= (byte) 0x30; } int pt = b.length - 2; while (pt >= 0 && b[pt] != (byte) 0xff || b[pt + 1] != (byte) 0xd9) { pt--; } if (pt < b.length - 2) { final byte[] tmp = b; b = new byte[pt + 2]; System.arraycopy(tmp, 0, b, 0, b.length); } final CodecOptions options = new CodecOptions(); options.littleEndian = meta.get(imageIndex).isLittleEndian(); options.interleaved = meta.get(imageIndex).getInterleavedAxisCount() > 0; final Codec codec = codecService.getCodec(meta.isJPEG() ? 
JPEGCodec.class : JPEG2000Codec.class); b = codec.decompress(b, options); final int rowLen = w * bpp; final int srcRowLen = (int) meta.get(imageIndex).getAxisLength(Axes.X) * bpp; final int srcPlane = (int) meta.get(imageIndex).getAxisLength(Axes.Y) * srcRowLen; for (int c = 0; c < ec; c++) { for (int row = 0; row < h; row++) { System.arraycopy(b, c * srcPlane + (row + y) * srcRowLen + x * bpp, plane.getBytes(), h * rowLen * c + row * rowLen, rowLen); } } } else if (meta.isDeflate()) { // TODO throw new UnsupportedCompressionException( "Deflate data is not supported."); } else { // plane is not compressed readPlane(getStream(), imageIndex, planeMin, planeMax, plane); } if (meta.isInverted()) { // pixels are stored such that white -> 0; invert the values so that // white -> 255 (or 65535) if (bpp == 1) { for (int i = 0; i < plane.getBytes().length; i++) { plane.getBytes()[i] = (byte) (255 - plane.getBytes()[i]); } } else if (bpp == 2) { if (meta.getMaxPixelValue() == -1) meta.setMaxPixelValue(65535); final boolean little = meta.get(imageIndex).isLittleEndian(); for (int i = 0; i < plane.getBytes().length; i += 2) { final short s = DataTools.bytesToShort(plane.getBytes(), i, 2, little); DataTools.unpackBytes(meta.getMaxPixelValue() - s, plane.getBytes(), i, 2, little); } } } // NB: do *not* apply the rescale function return plane; } } // -- DICOM Helper Classes -- private static class DICOMUtils { private static final int AE = 0x4145, AS = 0x4153, AT = 0x4154, CS = 0x4353; private static final int DA = 0x4441, DS = 0x4453, DT = 0x4454, FD = 0x4644; private static final int FL = 0x464C, IS = 0x4953, LO = 0x4C4F, LT = 0x4C54; private static final int PN = 0x504E, SH = 0x5348, SL = 0x534C, SS = 0x5353; private static final int ST = 0x5354, TM = 0x544D, UI = 0x5549, UL = 0x554C; private static final int US = 0x5553, UT = 0x5554, OB = 0x4F42, OW = 0x4F57; private static final int SQ = 0x5351, UN = 0x554E, QQ = 0x3F3F; private static final int IMPLICIT_VR = 0x2d2d; private static DICOMTag getNextTag(final RandomAccessInputStream stream) throws FormatException, IOException { return getNextTag(stream, false); } private static DICOMTag getNextTag(final RandomAccessInputStream stream, final boolean bigEndianTransferSyntax) throws FormatException, IOException { return getNextTag(stream, bigEndianTransferSyntax, false); } private static DICOMTag getNextTag(final RandomAccessInputStream stream, final boolean bigEndianTransferSyntax, final boolean isOddLocations) throws FormatException, IOException { final long fp = stream.getFilePointer(); int groupWord = stream.readShort() & 0xffff; final DICOMTag diTag = new DICOMTag(); boolean littleEndian = true; if (groupWord == 0x0800 && bigEndianTransferSyntax) { littleEndian = false; groupWord = 0x0008; stream.order(false); } else if (groupWord == 0xfeff || groupWord == 0xfffe) { stream.skipBytes(6); return DICOMUtils.getNextTag(stream, bigEndianTransferSyntax); } int elementWord = stream.readShort(); int tag = ((groupWord << 16) & 0xffff0000) | (elementWord & 0xffff); diTag.setElementLength(getLength(stream, diTag)); if (diTag.getElementLength() > stream.length()) { stream.seek(fp); littleEndian = !littleEndian; stream.order(littleEndian); groupWord = stream.readShort() & 0xffff; elementWord = stream.readShort(); tag = ((groupWord << 16) & 0xffff0000) | (elementWord & 0xffff); diTag.setElementLength(getLength(stream, diTag)); if (diTag.getElementLength() > stream.length()) { throw new FormatException("Invalid tag length " + diTag.getElementLength()); } 
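				// Re-reading with the stream's byte order flipped produced a sane length, so
				// record the (group,element) value and return this tag immediately. Note that
				// this early return leaves the tag's littleEndian flag at its default value.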
diTag.setTagValue(tag); return diTag; } if (diTag.getElementLength() < 0 && groupWord == 0x7fe0) { stream.skipBytes(12); diTag.setElementLength(stream.readInt()); if (diTag.getElementLength() < 0) diTag.setElementLength(stream .readInt()); } if (diTag.getElementLength() == 0 && (groupWord == 0x7fe0 || tag == 0x291014)) { diTag.setElementLength(getLength(stream, diTag)); } else if (diTag.getElementLength() == 0) { stream.seek(stream.getFilePointer() - 4); final String v = stream.readString(2); if (v.equals("UT")) { stream.skipBytes(2); diTag.setElementLength(stream.readInt()); } else stream.skipBytes(2); } // HACK - needed to read some GE files // The element length must be even! if (!isOddLocations && (diTag.getElementLength() % 2) == 1) diTag .incrementElementLength(); // "Undefined" element length. // This is a sort of bracket that encloses a sequence of elements. if (diTag.getElementLength() == -1) { diTag.setElementLength(0); diTag.setInSequence(true); } diTag.setTagValue(tag); diTag.setLittleEndian(littleEndian); return diTag; } private static int getLength(final RandomAccessInputStream stream, final DICOMTag tag) throws IOException { final byte[] b = new byte[4]; stream.read(b); // We cannot know whether the VR is implicit or explicit // without the full DICOM Data Dictionary for public and // private groups. // We will assume the VR is explicit if the two bytes // match the known codes. It is possible that these two // bytes are part of a 32-bit length for an implicit VR. final int vr = ((b[0] & 0xff) << 8) | (b[1] & 0xff); tag.setVR(vr); switch (vr) { case OB: case OW: case SQ: case UN: // Explicit VR with 32-bit length if other two bytes are zero if ((b[2] == 0) || (b[3] == 0)) { return stream.readInt(); } tag.setVR(IMPLICIT_VR); return DataTools.bytesToInt(b, stream.isLittleEndian()); case AE: case AS: case AT: case CS: case DA: case DS: case DT: case FD: case FL: case IS: case LO: case LT: case PN: case SH: case SL: case SS: case ST: case TM: case UI: case UL: case US: case UT: case QQ: // Explicit VR with 16-bit length if (tag.get() == 0x00283006) { return DataTools.bytesToInt(b, 2, 2, stream.isLittleEndian()); } int n1 = DataTools.bytesToShort(b, 2, 2, stream.isLittleEndian()); int n2 = DataTools.bytesToShort(b, 2, 2, !stream.isLittleEndian()); n1 &= 0xffff; n2 &= 0xffff; if (n1 < 0 || n1 + stream.getFilePointer() > stream.length()) return n2; if (n2 < 0 || n2 + stream.getFilePointer() > stream.length()) return n1; return n1; case 0xffff: tag.setVR(IMPLICIT_VR); return 8; default: tag.setVR(IMPLICIT_VR); int len = DataTools.bytesToInt(b, stream.isLittleEndian()); if (len + stream.getFilePointer() > stream.length() || len < 0) { len = DataTools.bytesToInt(b, 2, 2, stream.isLittleEndian()); len &= 0xffff; } return len; } } } public static class DICOMTag { private int elementLength = 0; private int tagValue; private int vr = 0; private boolean inSequence = false; private int location = 0; private boolean littleEndian; public int getLocation() { return location; } public void setLocation(final int location) { this.location = location; } public void addLocation(final int offset) { location += offset; } public int getVR() { return vr; } public void setVR(final int vr) { this.vr = vr; } public int getElementLength() { return elementLength; } public void setElementLength(final int elementLength) { this.elementLength = elementLength; } public void incrementElementLength() { elementLength++; } public int get() { return tagValue; } public void setTagValue(final int tagValue) { 
			this.tagValue = tagValue;
		}

		public boolean isInSequence() {
			return inSequence;
		}

		public void setInSequence(final boolean inSequence) {
			this.inSequence = inSequence;
		}

		public boolean isLittleEndian() {
			return littleEndian;
		}

		public void setLittleEndian(final boolean littleEndian) {
			this.littleEndian = littleEndian;
		}
	}
}
src/main/java/io/scif/formats/DICOMFormat.java
/* * #%L * SCIFIO library for reading and converting scientific file formats. * %% * Copyright (C) 2011 - 2015 Board of Regents of the University of * Wisconsin-Madison * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * #L% */ package io.scif.formats; import io.scif.AbstractChecker; import io.scif.AbstractFormat; import io.scif.AbstractMetadata; import io.scif.AbstractParser; import io.scif.ByteArrayPlane; import io.scif.ByteArrayReader; import io.scif.FilePattern; import io.scif.Format; import io.scif.FormatException; import io.scif.HasColorTable; import io.scif.ImageMetadata; import io.scif.MetadataLevel; import io.scif.UnsupportedCompressionException; import io.scif.codec.Codec; import io.scif.codec.CodecOptions; import io.scif.codec.CodecService; import io.scif.codec.JPEG2000Codec; import io.scif.codec.JPEGCodec; import io.scif.codec.PackbitsCodec; import io.scif.common.DataTools; import io.scif.config.SCIFIOConfig; import io.scif.io.Location; import io.scif.io.RandomAccessInputStream; import io.scif.services.InitializeService; import io.scif.util.FormatTools; import java.io.IOException; import java.util.Arrays; import java.util.Hashtable; import java.util.Vector; import net.imagej.axis.Axes; import net.imglib2.display.ColorTable; import net.imglib2.display.ColorTable16; import net.imglib2.display.ColorTable8; import org.scijava.plugin.Parameter; import org.scijava.plugin.Plugin; /** * DICOMReader is the file format reader for DICOM files. Much of this code is * adapted from <a * href="http://imagej.net/developer/source/ij/plugin/DICOM.java.html">ImageJ's * DICOM reader</a>. * * @author Mark Hiner */ @Plugin(type = Format.class, name = "DICOM") public class DICOMFormat extends AbstractFormat { // -- Constants -- public static final String DICOM_MAGIC_STRING = "DICM"; private static final Hashtable<Integer, String> TYPES = buildTypes(); // -- AbstractFormat Methods -- @Override protected String[] makeSuffixArray() { return new String[] { "dic", "dcm", "dicom", "jp2", "j2ki", "j2kr", "raw", "ima" }; } // -- Static Helper methods -- /** * Assemble the data dictionary. This is incomplete at best, since there are * literally thousands of fields defined by the DICOM specifications. 
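	 * Tags with no entry here are still captured by the parser; their values are simply
	 * keyed by a hexadecimal group,element label instead of a descriptive name.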
*/ private static Hashtable<Integer, String> buildTypes() { final Hashtable<Integer, String> dict = new Hashtable<Integer, String>(); dict.put(0x00020002, "Media Storage SOP Class UID"); dict.put(0x00020003, "Media Storage SOP Instance UID"); dict.put(0x00020010, "Transfer Syntax UID"); dict.put(0x00020012, "Implementation Class UID"); dict.put(0x00020013, "Implementation Version Name"); dict.put(0x00020016, "Source Application Entity Title"); dict.put(0x00080005, "Specific Character Set"); dict.put(0x00080008, "Image Type"); dict.put(0x00080010, "Recognition Code"); dict.put(0x00080012, "Instance Creation Date"); dict.put(0x00080013, "Instance Creation Time"); dict.put(0x00080014, "Instance Creator UID"); dict.put(0x00080016, "SOP Class UID"); dict.put(0x00080018, "SOP Instance UID"); dict.put(0x0008001a, "Related General SOP Class UID"); dict.put(0x0008001b, "Original Specialized SOP Class UID"); dict.put(0x00080020, "Study Date"); dict.put(0x00080021, "Series Date"); dict.put(0x00080022, "Acquisition Date"); dict.put(0x00080023, "Content Date"); dict.put(0x00080024, "Overlay Date"); dict.put(0x00080025, "Curve Date"); dict.put(0x0008002a, "Acquisition Date/Time"); dict.put(0x00080030, "Study Time"); dict.put(0x00080031, "Series Time"); dict.put(0x00080032, "Acquisition Time"); dict.put(0x00080033, "Content Time"); dict.put(0x00080034, "Overlay Time"); dict.put(0x00080035, "Curve Time"); dict.put(0x00080041, "Data Set Subtype"); dict.put(0x00080050, "Accession Number"); dict.put(0x00080052, "Query/Retrieve Level"); dict.put(0x00080054, "Retrieve AE Title"); dict.put(0x00080056, "Instance Availability"); dict.put(0x00080058, "Failed SOP Instance UID List"); dict.put(0x00080060, "Modality"); dict.put(0x00080061, "Modalities in Study"); dict.put(0x00080062, "SOP Classes in Study"); dict.put(0x00080064, "Conversion Type"); dict.put(0x00080068, "Presentation Intent Type"); dict.put(0x00080070, "Manufacturer"); dict.put(0x00080080, "Institution Name"); dict.put(0x00080081, "Institution Address"); dict.put(0x00080082, "Institution Code Sequence"); dict.put(0x00080090, "Referring Physician's Name"); dict.put(0x00080092, "Referring Physician's Address"); dict.put(0x00080094, "Referring Physician's Telephone"); dict.put(0x00080096, "Referring Physician ID"); dict.put(0x00080100, "Code Value"); dict.put(0x00080102, "Coding Scheme Designator"); dict.put(0x00080103, "Coding Scheme Version"); dict.put(0x00080104, "Code Meaning"); dict.put(0x00080105, "Mapping Resource"); dict.put(0x00080106, "Context Group Version"); dict.put(0x00080107, "Context Group Local Version"); dict.put(0x0008010b, "Context Group Extension Flag"); dict.put(0x0008010c, "Coding Scheme UID"); dict.put(0x0008010d, "Context Group Extension Creator UID"); dict.put(0x0008010f, "Context ID"); dict.put(0x00080110, "Coding Scheme ID"); dict.put(0x00080112, "Coding Scheme Registry"); dict.put(0x00080114, "Coding Scheme External ID"); dict.put(0x00080115, "Coding Scheme Name"); dict.put(0x00080116, "Responsible Organization"); dict.put(0x00080201, "Timezone Offset from UTC"); dict.put(0x00081010, "Station Name"); dict.put(0x00081030, "Study Description"); dict.put(0x00081032, "Procedure Code Sequence"); dict.put(0x0008103e, "Series Description"); dict.put(0x00081040, "Institutional Department Name"); dict.put(0x00081048, "Physician(s) of Record"); dict.put(0x00081049, "Physician(s) of Record ID"); dict.put(0x00081050, "Performing Physician's Name"); dict.put(0x00081052, "Performing Physican ID"); dict.put(0x00081060, "Name of Physician(s) 
Reading Study"); dict.put(0x00081062, "Physician(s) Reading Study ID"); dict.put(0x00081070, "Operator's Name"); dict.put(0x00081072, "Operator ID"); dict.put(0x00081080, "Admitting Diagnoses Description"); dict.put(0x00081084, "Admitting Diagnoses Code Sequence"); dict.put(0x00081090, "Manufacturer's Model Name"); dict.put(0x00081100, "Referenced Results Sequence"); dict.put(0x00081110, "Referenced Study Sequence"); dict.put(0x00081111, "Referenced Performed Procedure Step"); dict.put(0x00081115, "Referenced Series Sequence"); dict.put(0x00081120, "Referenced Patient Sequence"); dict.put(0x00081125, "Referenced Visit Sequence"); dict.put(0x00081130, "Referenced Overlay Sequence"); dict.put(0x0008113a, "Referenced Waveform Sequence"); dict.put(0x00081140, "Referenced Image Sequence"); dict.put(0x00081145, "Referenced Curve Sequence"); dict.put(0x0008114a, "Referenced Instance Sequence"); dict.put(0x00081150, "Referenced SOP Class UID"); dict.put(0x00081155, "Referenced SOP Instance UID"); dict.put(0x0008115a, "SOP Classes Supported"); dict.put(0x00081160, "Referenced Frame Number"); dict.put(0x00081195, "Transaction UID"); dict.put(0x00081197, "Failure Reason"); dict.put(0x00081198, "Failed SOP Sequence"); dict.put(0x00081199, "Referenced SOP Sequence"); dict.put(0x00081200, "Studies Containing Other Referenced Instances Sequence"); dict.put(0x00081250, "Related Series Sequence"); dict.put(0x00082111, "Derivation Description"); dict.put(0x00082112, "Source Image Sequence"); dict.put(0x00082120, "Stage Name"); dict.put(0x00082122, "Stage Number"); dict.put(0x00082124, "Number of Stages"); dict.put(0x00082127, "View Name"); dict.put(0x00082128, "View Number"); dict.put(0x00082129, "Number of Event Timers"); dict.put(0x0008212a, "Number of Views in Stage"); dict.put(0x00082130, "Event Elapsed Time(s)"); dict.put(0x00082132, "Event Timer Name(s)"); dict.put(0x00082142, "Start Trim"); dict.put(0x00082143, "Stop Trim"); dict.put(0x00082144, "Recommended Display Frame Rate"); dict.put(0x00082218, "Anatomic Region Sequence"); dict.put(0x00082220, "Anatomic Region Modifier Sequence"); dict.put(0x00082228, "Primary Anatomic Structure Sequence"); dict.put(0x00082229, "Anatomic Structure Sequence"); dict.put(0x00082230, "Primary Anatomic Structure Modifier"); dict.put(0x00082240, "Transducer Position Sequence"); dict.put(0x00082242, "Transducer Position Modifier Sequence"); dict.put(0x00082244, "Transducer Orientation Sequence"); dict.put(0x00082246, "Transducer Orientation Modifier"); dict.put(0x00083001, "Alternate Representation Sequence"); dict.put(0x00089007, "Frame Type"); dict.put(0x00089092, "Referenced Image Evidence Sequence"); dict.put(0x00089121, "Referenced Raw Data Sequence"); dict.put(0x00089123, "Creator-Version UID"); dict.put(0x00089124, "Derivation Image Sequence"); dict.put(0x00089154, "Source Image Evidence Sequence"); dict.put(0x00089205, "Pixel Representation"); dict.put(0x00089206, "Volumetric Properties"); dict.put(0x00089207, "Volume Based Calculation Technique"); dict.put(0x00089208, "Complex Image Component"); dict.put(0x00089209, "Acquisition Contrast"); dict.put(0x00089215, "Derivation Code Sequence"); dict.put(0x00089237, "Reference Grayscale Presentation State"); dict.put(0x00100010, "Patient's Name"); dict.put(0x00100020, "Patient ID"); dict.put(0x00100021, "Issuer of Patient ID"); dict.put(0x00100030, "Patient's Birth Date"); dict.put(0x00100032, "Patient's Birth Time"); dict.put(0x00100040, "Patient's Sex"); dict.put(0x00100050, "Patient's Insurance Plane Code"); 
dict.put(0x00100101, "Patient's Primary Language Code"); dict.put(0x00100102, "Patient's Primary Language Modifier"); dict.put(0x00101000, "Other Patient IDs"); dict.put(0x00101001, "Other Patient Names"); dict.put(0x00101005, "Patient's Birth Name"); dict.put(0x00101010, "Patient's Age"); dict.put(0x00101020, "Patient's Size"); dict.put(0x00101030, "Patient's Weight"); dict.put(0x00101040, "Patient's Address"); dict.put(0x00101060, "Patient's Mother's Birth Name"); dict.put(0x00101080, "Military Rank"); dict.put(0x00101081, "Branch of Service"); dict.put(0x00101090, "Medical Record Locator"); dict.put(0x00102000, "Medical Alerts"); dict.put(0x00102110, "Contrast Allergies"); dict.put(0x00102150, "Country of Residence"); dict.put(0x00102152, "Region of Residence"); dict.put(0x00102154, "Patient's Telephone Numbers"); dict.put(0x00102160, "Ethnic Group"); dict.put(0x00102180, "Occupation"); dict.put(0x001021a0, "Smoking Status"); dict.put(0x001021b0, "Additional Patient History"); dict.put(0x001021c0, "Pregnancy Status"); dict.put(0x001021d0, "Last Menstrual Date"); dict.put(0x001021f0, "Patient's Religious Preference"); dict.put(0x00104000, "Patient Comments"); dict.put(0x00120010, "Clinical Trial Sponsor Name"); dict.put(0x00120020, "Clinical Trial Protocol ID"); dict.put(0x00120021, "Clinical Trial Protocol Name"); dict.put(0x00120030, "Clinical Trial Site ID"); dict.put(0x00120031, "Clinical Trial Site Name"); dict.put(0x00120040, "Clinical Trial Subject ID"); dict.put(0x00120042, "Clinical Trial Subject Reading ID"); dict.put(0x00120050, "Clinical Trial Time Point ID"); dict.put(0x00120051, "Clinical Trial Time Point Description"); dict.put(0x00120060, "Clinical Trial Coordinating Center"); dict.put(0x00180010, "Contrast/Bolus Agent"); dict.put(0x00180012, "Contrast/Bolus Agent Sequence"); dict.put(0x00180014, "Contrast/Bolus Admin. 
Route Sequence"); dict.put(0x00180015, "Body Part Examined"); dict.put(0x00180020, "Scanning Sequence"); dict.put(0x00180021, "Sequence Variant"); dict.put(0x00180022, "Scan Options"); dict.put(0x00180023, "MR Acquisition Type"); dict.put(0x00180024, "Sequence Name"); dict.put(0x00180025, "Angio Flag"); dict.put(0x00180026, "Intervention Drug Information Sequence"); dict.put(0x00180027, "Intervention Drug Stop Time"); dict.put(0x00180028, "Intervention Drug Dose"); dict.put(0x00180029, "Intervention Drug Sequence"); dict.put(0x0018002a, "Additional Drug Sequence"); dict.put(0x00180031, "Radiopharmaceutical"); dict.put(0x00180034, "Intervention Drug Name"); dict.put(0x00180035, "Intervention Drug Start Time"); dict.put(0x00180036, "Intervention Sequence"); dict.put(0x00180038, "Intervention Status"); dict.put(0x0018003a, "Intervention Description"); dict.put(0x00180040, "Cine Rate"); dict.put(0x00180050, "Slice Thickness"); dict.put(0x00180060, "KVP"); dict.put(0x00180070, "Counts Accumulated"); dict.put(0x00180071, "Acquisition Termination Condition"); dict.put(0x00180072, "Effective Duration"); dict.put(0x00180073, "Acquisition Start Condition"); dict.put(0x00180074, "Acquisition Start Condition Data"); dict.put(0x00180075, "Acquisition Termination Condition Data"); dict.put(0x00180080, "Repetition Time"); dict.put(0x00180081, "Echo Time"); dict.put(0x00180082, "Inversion Time"); dict.put(0x00180083, "Number of Averages"); dict.put(0x00180084, "Imaging Frequency"); dict.put(0x00180085, "Imaged Nucleus"); dict.put(0x00180086, "Echo Number(s)"); dict.put(0x00180087, "Magnetic Field Strength"); dict.put(0x00180088, "Spacing Between Slices"); dict.put(0x00180089, "Number of Phase Encoding Steps"); dict.put(0x00180090, "Data Collection Diameter"); dict.put(0x00180091, "Echo Train Length"); dict.put(0x00180093, "Percent Sampling"); dict.put(0x00180094, "Percent Phase Field of View"); dict.put(0x00180095, "Pixel Bandwidth"); dict.put(0x00181000, "Device Serial Number"); dict.put(0x00181004, "Plate ID"); dict.put(0x00181010, "Secondary Capture Device ID"); dict.put(0x00181011, "Hardcopy Creation Device ID"); dict.put(0x00181012, "Date of Secondary Capture"); dict.put(0x00181014, "Time of Secondary Capture"); dict.put(0x00181016, "Secondary Capture Device Manufacturer"); dict.put(0x00181017, "Hardcopy Device Manufacturer"); dict.put(0x00181018, "Secondary Capture Device Model Name"); dict.put(0x00181019, "Secondary Capture Device Software Version"); dict.put(0x0018101a, "Hardcopy Device Software Version"); dict.put(0x0018101b, "Hardcopy Device Model Name"); dict.put(0x00181020, "Software Version(s)"); dict.put(0x00181022, "Video Image Format Acquired"); dict.put(0x00181023, "Digital Image Format Acquired"); dict.put(0x00181030, "Protocol Name"); dict.put(0x00181040, "Contrast/Bolus Route"); dict.put(0x00181041, "Contrast/Bolus Volume"); dict.put(0x00181042, "Contrast/Bolus Start Time"); dict.put(0x00181043, "Contrast/Bolus Stop Time"); dict.put(0x00181044, "Contrast/Bolus Total Dose"); dict.put(0x00181045, "Syringe Counts"); dict.put(0x00181046, "Contrast Flow Rate"); dict.put(0x00181047, "Contrast Flow Duration"); dict.put(0x00181048, "Contrast/Bolus Ingredient"); dict.put(0x00181049, "Contrast Ingredient Concentration"); dict.put(0x00181050, "Spatial Resolution"); dict.put(0x00181060, "Trigger Time"); dict.put(0x00181061, "Trigger Source or Type"); dict.put(0x00181062, "Nominal Interval"); dict.put(0x00181063, "Frame Time"); dict.put(0x00181064, "Framing Type"); dict.put(0x00181065, "Frame Time 
Vector"); dict.put(0x00181066, "Frame Delay"); dict.put(0x00181067, "Image Trigger Delay"); dict.put(0x00181068, "Multiplex Group Time Offset"); dict.put(0x00181069, "Trigger Time Offset"); dict.put(0x0018106a, "Synchronization Trigger"); dict.put(0x0018106c, "Synchronization Channel"); dict.put(0x0018106e, "Trigger Sample Position"); dict.put(0x00181070, "Radiopharmaceutical Route"); dict.put(0x00181071, "Radiopharmaceutical Volume"); dict.put(0x00181072, "Radiopharmaceutical Start Time"); dict.put(0x00181073, "Radiopharmaceutical Stop Time"); dict.put(0x00181074, "Radionuclide Total Dose"); dict.put(0x00181075, "Radionuclide Half Life"); dict.put(0x00181076, "Radionuclide Positron Fraction"); dict.put(0x00181077, "Radiopharmaceutical Specific Activity"); dict.put(0x00181080, "Beat Rejection Flag"); dict.put(0x00181081, "Low R-R Value"); dict.put(0x00181082, "High R-R Value"); dict.put(0x00181083, "Intervals Acquired"); dict.put(0x00181084, "Intervals Rejected"); dict.put(0x00181085, "PVC Rejection"); dict.put(0x00181086, "Skip Beats"); dict.put(0x00181088, "Heart Rate"); dict.put(0x00181090, "Cardiac Number of Images"); dict.put(0x00181094, "Trigger Window"); dict.put(0x00181100, "Reconstruction Diameter"); dict.put(0x00181110, "Distance Source to Detector"); dict.put(0x00181111, "Distance Source to Patient"); dict.put(0x00181114, "Estimated Radiographic Mag. Factor"); dict.put(0x00181120, "Gantry/Detector Tilt"); dict.put(0x00181121, "Gantry/Detector Skew"); dict.put(0x00181130, "Table Height"); dict.put(0x00181131, "Table Traverse"); dict.put(0x00181134, "Table Motion"); dict.put(0x00181135, "Table Vertical Increment"); dict.put(0x00181136, "Table Lateral Increment"); dict.put(0x00181137, "Table Longitudinal Increment"); dict.put(0x00181138, "Table Angle"); dict.put(0x0018113a, "Table Type"); dict.put(0x00181140, "Rotation Direction"); dict.put(0x00181141, "Angular Position"); dict.put(0x00181142, "Radial Position"); dict.put(0x00181143, "Scan Arc"); dict.put(0x00181144, "Angular Step"); dict.put(0x00181145, "Center of Rotation Offset"); dict.put(0x00181147, "Field of View Shape"); dict.put(0x00181149, "Field of View Dimension(s)"); dict.put(0x00181150, "Exposure Time"); dict.put(0x00181151, "X-ray Tube Current"); dict.put(0x00181152, "Exposure"); dict.put(0x00181153, "Exposure in uAs"); dict.put(0x00181154, "Average Pulse Width"); dict.put(0x00181155, "Radiation Setting"); dict.put(0x00181156, "Rectification Type"); dict.put(0x0018115a, "Radiation Mode"); dict.put(0x0018115e, "Image Area Dose Product"); dict.put(0x00181160, "Filter Type"); dict.put(0x00181161, "Type of Filters"); dict.put(0x00181162, "Intensifier Size"); dict.put(0x00181164, "Imager Pixel Spacing"); dict.put(0x00181166, "Grid"); dict.put(0x00181170, "Generator Power"); dict.put(0x00181180, "Collimator/Grid Name"); dict.put(0x00181181, "Collimator Type"); dict.put(0x00181182, "Focal Distance"); dict.put(0x00181183, "X Focus Center"); dict.put(0x00181184, "Y Focus Center"); dict.put(0x00181190, "Focal Spot(s)"); dict.put(0x00181191, "Anode Target Material"); dict.put(0x001811a0, "Body Part Thickness"); dict.put(0x001811a2, "Compression Force"); dict.put(0x00181200, "Date of Last Calibration"); dict.put(0x00181201, "Time of Last Calibration"); dict.put(0x00181210, "Convolution Kernel"); dict.put(0x00181242, "Actual Frame Duration"); dict.put(0x00181243, "Count Rate"); dict.put(0x00181244, "Preferred Playback Sequencing"); dict.put(0x00181250, "Receive Coil Name"); dict.put(0x00181251, "Transmit Coil Name"); 
dict.put(0x00181260, "Plate Type"); dict.put(0x00181261, "Phosphor Type"); dict.put(0x00181300, "Scan Velocity"); dict.put(0x00181301, "Whole Body Technique"); dict.put(0x00181302, "Scan Length"); dict.put(0x00181310, "Acquisition Matrix"); dict.put(0x00181312, "In-plane Phase Encoding Direction"); dict.put(0x00181314, "Flip Angle"); dict.put(0x00181315, "Variable Flip Angle Flag"); dict.put(0x00181316, "SAR"); dict.put(0x00181318, "dB/dt"); dict.put(0x00181400, "Acquisition Device Processing Descr."); dict.put(0x00181401, "Acquisition Device Processing Code"); dict.put(0x00181402, "Cassette Orientation"); dict.put(0x00181403, "Cassette Size"); dict.put(0x00181404, "Exposures on Plate"); dict.put(0x00181405, "Relative X-ray Exposure"); dict.put(0x00181450, "Column Angulation"); dict.put(0x00181460, "Tomo Layer Height"); dict.put(0x00181470, "Tomo Angle"); dict.put(0x00181480, "Tomo Time"); dict.put(0x00181490, "Tomo Type"); dict.put(0x00181491, "Tomo Class"); dict.put(0x00181495, "Number of Tomosynthesis Source Images"); dict.put(0x00181500, "Positioner Motion"); dict.put(0x00181508, "Positioner Type"); dict.put(0x00181510, "Positioner Primary Angle"); dict.put(0x00181511, "Positioner Secondary Angle"); dict.put(0x00181520, "Positioner Primary Angle Increment"); dict.put(0x00181521, "Positioner Secondary Angle Increment"); dict.put(0x00181530, "Detector Primary Angle"); dict.put(0x00181531, "Detector Secondary Angle"); dict.put(0x00181600, "Shutter Shape"); dict.put(0x00181602, "Shutter Left Vertical Edge"); dict.put(0x00181604, "Shutter Right Vertical Edge"); dict.put(0x00181606, "Shutter Upper Horizontal Edge"); dict.put(0x00181608, "Shutter Lower Horizontal Edge"); dict.put(0x00181610, "Center of Circular Shutter"); dict.put(0x00181612, "Radius of Circular Shutter"); dict.put(0x00181620, "Vertices of the Polygonal Shutter"); dict.put(0x00181622, "Shutter Presentation Value"); dict.put(0x00181623, "Shutter Overlay Group"); dict.put(0x00181700, "Collimator Shape"); dict.put(0x00181702, "Collimator Left Vertical Edge"); dict.put(0x00181704, "Collimator Right Vertical Edge"); dict.put(0x00181706, "Collimator Upper Horizontal Edge"); dict.put(0x00181708, "Collimator Lower Horizontal Edge"); dict.put(0x00181710, "Center of Circular Collimator"); dict.put(0x00181712, "Radius of Circular Collimator"); dict.put(0x00181720, "Vertices of the polygonal Collimator"); dict.put(0x00181800, "Acquisition Time Synchronized"); dict.put(0x00181801, "Time Source"); dict.put(0x00181802, "Time Distribution Protocol"); dict.put(0x00181803, "NTP Source Address"); dict.put(0x00182001, "Page Number Vector"); dict.put(0x00182002, "Frame Label Vector"); dict.put(0x00182003, "Frame Primary Angle Vector"); dict.put(0x00182004, "Frame Secondary Angle Vector"); dict.put(0x00182005, "Slice Location Vector"); dict.put(0x00182006, "Display Window Label Vector"); dict.put(0x00182010, "Nominal Scanned Pixel Spacing"); dict.put(0x00182020, "Digitizing Device Transport Direction"); dict.put(0x00182030, "Rotation of Scanned Film"); dict.put(0x00183100, "IVUS Acquisition"); dict.put(0x00183101, "IVUS Pullback Rate"); dict.put(0x00183102, "IVUS Gated Rate"); dict.put(0x00183103, "IVUS Pullback Start Frame Number"); dict.put(0x00183104, "IVUS Pullback Stop Frame Number"); dict.put(0x00183105, "Lesion Number"); dict.put(0x00185000, "Output Power"); dict.put(0x00185010, "Transducer Data"); dict.put(0x00185012, "Focus Depth"); dict.put(0x00185020, "Processing Function"); dict.put(0x00185021, "Postprocessing Fuction"); 
dict.put(0x00185022, "Mechanical Index"); dict.put(0x00185024, "Bone Thermal Index"); dict.put(0x00185026, "Cranial Thermal Index"); dict.put(0x00185027, "Soft Tissue Thermal Index"); dict.put(0x00185028, "Soft Tissue-focus Thermal Index"); dict.put(0x00185029, "Soft Tissue-surface Thermal Index"); dict.put(0x00185050, "Depth of scan field"); dict.put(0x00185100, "Patient Position"); dict.put(0x00185101, "View Position"); dict.put(0x00185104, "Projection Eponymous Name Code"); dict.put(0x00186000, "Sensitivity"); dict.put(0x00186011, "Sequence of Ultrasound Regions"); dict.put(0x00186012, "Region Spatial Format"); dict.put(0x00186014, "Region Data Type"); dict.put(0x00186016, "Region Flags"); dict.put(0x00186018, "Region Location Min X0"); dict.put(0x0018601a, "Region Location Min Y0"); dict.put(0x0018601c, "Region Location Max X1"); dict.put(0x0018601e, "Region Location Max Y1"); dict.put(0x00186020, "Reference Pixel X0"); dict.put(0x00186022, "Reference Pixel Y0"); dict.put(0x00186024, "Physical Units X Direction"); dict.put(0x00186026, "Physical Units Y Direction"); dict.put(0x00186028, "Reference Pixel Physical Value X"); dict.put(0x0018602a, "Reference Pixel Physical Value Y"); dict.put(0x0018602c, "Physical Delta X"); dict.put(0x0018602e, "Physical Delta Y"); dict.put(0x00186030, "Transducer Frequency"); dict.put(0x00186031, "Transducer Type"); dict.put(0x00186032, "Pulse Repetition Frequency"); dict.put(0x00186034, "Doppler Correction Angle"); dict.put(0x00186036, "Steering Angle"); dict.put(0x00186039, "Doppler Sample Volume X Position"); dict.put(0x0018603b, "Doppler Sample Volume Y Position"); dict.put(0x0018603d, "TM-Line Position X0"); dict.put(0x0018603f, "TM-Line Position Y0"); dict.put(0x00186041, "TM-Line Position X1"); dict.put(0x00186043, "TM-Line Position Y1"); dict.put(0x00186044, "Pixel Component Organization"); dict.put(0x00186046, "Pixel Component Mask"); dict.put(0x00186048, "Pixel Component Range Start"); dict.put(0x0018604a, "Pixel Component Range Stop"); dict.put(0x0018604c, "Pixel Component Physical Units"); dict.put(0x0018604e, "Pixel Component Data Type"); dict.put(0x00186050, "Number of Table Break Points"); dict.put(0x00186052, "Table of X Break Points"); dict.put(0x00186054, "Table of Y Break Points"); dict.put(0x00186056, "Number of Table Entries"); dict.put(0x00186058, "Table of Pixel Values"); dict.put(0x0018605a, "Table of Parameter Values"); dict.put(0x00186060, "R Wave Time Vector"); dict.put(0x00187000, "Detector Conditions Nominal Flag"); dict.put(0x00187001, "Detector Temperature"); dict.put(0x00187004, "Detector Type"); dict.put(0x00187005, "Detector Configuration"); dict.put(0x00187006, "Detector Description"); dict.put(0x00187008, "Detector Mode"); dict.put(0x0018700a, "Detector ID"); dict.put(0x0018700c, "Date of Last Detector Calibration"); dict.put(0x0018700e, "Time of Last Detector Calibration"); dict.put(0x00187012, "Detector Time Since Last Exposure"); dict.put(0x00187014, "Detector Active Time"); dict.put(0x00187016, "Detector Activation Offset"); dict.put(0x0018701a, "Detector Binning"); dict.put(0x00187020, "Detector Element Physical Size"); dict.put(0x00187022, "Detector Element Spacing"); dict.put(0x00187024, "Detector Active Shape"); dict.put(0x00187026, "Detector Active Dimension(s)"); dict.put(0x00187028, "Detector Active Origin"); dict.put(0x0018702a, "Detector Manufacturer Name"); dict.put(0x0018702b, "Detector Model Name"); dict.put(0x00187030, "Field of View Origin"); dict.put(0x00187032, "Field of View Rotation"); 
dict.put(0x00187034, "Field of View Horizontal Flip"); dict.put(0x00187040, "Grid Absorbing Material"); dict.put(0x00187041, "Grid Spacing Material"); dict.put(0x00187042, "Grid Thickness"); dict.put(0x00187044, "Grid Pitch"); dict.put(0x00187046, "Grid Aspect Ratio"); dict.put(0x00187048, "Grid Period"); dict.put(0x0018704c, "Grid Focal Distance"); dict.put(0x00187050, "Filter Material"); dict.put(0x00187052, "Filter Thickness Min"); dict.put(0x00187054, "Filter Thickness Max"); dict.put(0x00187060, "Exposure Control Mode"); dict.put(0x0020000d, "Study Instance UID"); dict.put(0x0020000e, "Series Instance UID"); dict.put(0x00200011, "Series Number"); dict.put(0x00200012, "Acquisition Number"); dict.put(0x00200013, "Instance Number"); dict.put(0x00200020, "Patient Orientation"); dict.put(0x00200030, "Image Position"); dict.put(0x00200032, "Image Position (Patient)"); dict.put(0x00200037, "Image Orientation (Patient)"); dict.put(0x00200050, "Location"); dict.put(0x00200052, "Frame of Reference UID"); dict.put(0x00200070, "Image Geometry Type"); dict.put(0x00201001, "Acquisitions in Series"); dict.put(0x00201020, "Reference"); dict.put(0x00201041, "Slice Location"); // skipped a bunch of stuff here - not used dict.put(0x00280002, "Samples per pixel"); dict.put(0x00280003, "Samples per pixel used"); dict.put(0x00280004, "Photometric Interpretation"); dict.put(0x00280006, "Planar Configuration"); dict.put(0x00280008, "Number of frames"); dict.put(0x00280009, "Frame Increment Pointer"); dict.put(0x0028000a, "Frame Dimension Pointer"); dict.put(0x00280010, "Rows"); dict.put(0x00280011, "Columns"); dict.put(0x00280012, "Planes"); dict.put(0x00280014, "Ultrasound Color Data Present"); dict.put(0x00280030, "Pixel Spacing"); dict.put(0x00280031, "Zoom Factor"); dict.put(0x00280032, "Zoom Center"); dict.put(0x00280034, "Pixel Aspect Ratio"); dict.put(0x00280051, "Corrected Image"); dict.put(0x00280100, "Bits Allocated"); dict.put(0x00280101, "Bits Stored"); dict.put(0x00280102, "High Bit"); dict.put(0x00280103, "Pixel Representation"); dict.put(0x00280106, "Smallest Image Pixel Value"); dict.put(0x00280107, "Largest Image Pixel Value"); dict.put(0x00280108, "Smallest Pixel Value in Series"); dict.put(0x00280109, "Largest Pixel Value in Series"); dict.put(0x00280110, "Smallest Image Pixel Value in Plane"); dict.put(0x00280111, "Largest Image Pixel Value in Plane"); dict.put(0x00280120, "Pixel Padding Value"); dict.put(0x00280300, "Quality Control Image"); dict.put(0x00280301, "Burned in Annotation"); dict.put(0x00281040, "Pixel Intensity Relationship"); dict.put(0x00281041, "Pixel Intensity Relationship Sign"); dict.put(0x00281050, "Window Center"); dict.put(0x00281051, "Window Width"); dict.put(0x00281052, "Rescale Intercept"); dict.put(0x00281053, "Rescale Slope"); dict.put(0x00281054, "Rescale Type"); dict.put(0x00281055, "Window Center and Width Explanation"); dict.put(0x00281090, "Recommended Viewing Mode"); dict.put(0x00281101, "Red Palette Color LUT Descriptor"); dict.put(0x00281102, "Green Palette Color LUT Descriptor"); dict.put(0x00281103, "Blue Palette Color LUT Descriptor"); dict.put(0x00281199, "Palette Color LUT UID"); dict.put(0x00281201, "Red Palette Color LUT Data"); dict.put(0x00281202, "Green Palette Color LUT Data"); dict.put(0x00281203, "Blue Palette Color LUT Data"); dict.put(0x00281221, "Segmented Red Palette Color LUT Data"); dict.put(0x00281222, "Segmented Green Palette Color LUT Data"); dict.put(0x00281223, "Segmented Blue Palette Color LUT Data"); dict.put(0x00281300, 
"Implant Present"); dict.put(0x00281350, "Partial View"); dict.put(0x00281351, "Partial View Description"); dict.put(0x00282110, "Lossy Image Compression"); dict.put(0x00282112, "Lossy Image Compression Ratio"); dict.put(0x00282114, "Lossy Image Compression Method"); dict.put(0x00283000, "Modality LUT Sequence"); dict.put(0x00283002, "LUT Descriptor"); dict.put(0x00283003, "LUT Explanation"); dict.put(0x00283004, "Modality LUT Type"); dict.put(0x00283006, "LUT Data"); dict.put(0x00283010, "VOI LUT Sequence"); dict.put(0x00283110, "Softcopy VOI LUT Sequence"); dict.put(0x00285000, "Bi-Plane Acquisition Sequence"); dict.put(0x00286010, "Representative Frame Number"); dict.put(0x00286020, "Frame Numbers of Interest (FOI)"); dict.put(0x00286022, "Frame(s) of Interest Description"); dict.put(0x00286023, "Frame of Interest Type"); dict.put(0x00286040, "R Wave Pointer"); dict.put(0x00286100, "Mask Subtraction Sequence"); dict.put(0x00286101, "Mask Operation"); dict.put(0x00286102, "Applicable Frame Range"); dict.put(0x00286110, "Mask Frame Numbers"); dict.put(0x00286112, "Contrast Frame Averaging"); dict.put(0x00286114, "Mask Sub-pixel Shift"); dict.put(0x00286120, "TID Offset"); dict.put(0x00286190, "Mask Operation Explanation"); dict.put(0x00289001, "Data Point Rows"); dict.put(0x00289002, "Data Point Columns"); dict.put(0x00289003, "Signal Domain Columns"); dict.put(0x00289108, "Data Representation"); dict.put(0x00289110, "Pixel Measures Sequence"); dict.put(0x00289132, "Frame VOI LUT Sequence"); dict.put(0x00289145, "Pixel Value Transformation Sequence"); dict.put(0x00289235, "Signal Domain Rows"); // skipping some more stuff dict.put(0x00540011, "Number of Energy Windows"); dict.put(0x00540021, "Number of Detectors"); dict.put(0x00540051, "Number of Rotations"); dict.put(0x00540080, "Slice Vector"); dict.put(0x00540081, "Number of Slices"); dict.put(0x00540202, "Type of Detector Motion"); dict.put(0x00540400, "Image ID"); dict.put(0x20100100, "Border Density"); return dict; } // -- Nested Classes -- public static class Metadata extends AbstractMetadata implements HasColorTable { // -- Fields -- byte[][] lut = null; short[][] shortLut = null; private ColorTable8 lut8; private ColorTable16 lut16; private long[] offsets = null; private boolean isJP2K = false; private boolean isJPEG = false; private boolean isRLE = false; private boolean isDeflate = false; private boolean oddLocations = false; private int maxPixelValue; private int imagesPerFile = 0; private double rescaleSlope = 1.0, rescaleIntercept = 0.0; private Hashtable<Integer, Vector<String>> fileList; private boolean inverted = false; private String pixelSizeX, pixelSizeY; private Double pixelSizeZ; private String date, time, imageType; private String originalDate, originalTime, originalInstance; private int originalSeries; private Vector<String> companionFiles = new Vector<String>(); // Getters and Setters public long[] getOffsets() { return offsets; } public void setOffsets(final long[] offsets) { this.offsets = offsets; } public double getRescaleSlope() { return rescaleSlope; } public void setRescaleSlope(final double rescaleSlope) { this.rescaleSlope = rescaleSlope; } public double getRescaleIntercept() { return rescaleIntercept; } public void setRescaleIntercept(final double rescaleIntercept) { this.rescaleIntercept = rescaleIntercept; } public String getPixelSizeX() { return pixelSizeX; } public void setPixelSizeX(final String pixelSizeX) { this.pixelSizeX = pixelSizeX; } public String getPixelSizeY() { return pixelSizeY; } public 
void setPixelSizeY(final String pixelSizeY) { this.pixelSizeY = pixelSizeY; } public Double getPixelSizeZ() { return pixelSizeZ; } public void setPixelSizeZ(final Double pixelSizeZ) { this.pixelSizeZ = pixelSizeZ; } public boolean isInverted() { return inverted; } public void setInverted(final boolean inverted) { this.inverted = inverted; } public boolean isJP2K() { return isJP2K; } public void setJP2K(final boolean isJP2K) { this.isJP2K = isJP2K; } public boolean isJPEG() { return isJPEG; } public void setJPEG(final boolean isJPEG) { this.isJPEG = isJPEG; } public boolean isRLE() { return isRLE; } public void setRLE(final boolean isRLE) { this.isRLE = isRLE; } public boolean isDeflate() { return isDeflate; } public void setDeflate(final boolean isDeflate) { this.isDeflate = isDeflate; } public boolean isOddLocations() { return oddLocations; } public void setOddLocations(final boolean oddLocations) { this.oddLocations = oddLocations; } public int getMaxPixelValue() { return maxPixelValue; } public void setMaxPixelValue(final int maxPixelValue) { this.maxPixelValue = maxPixelValue; } public int getImagesPerFile() { return imagesPerFile; } public void setImagesPerFile(final int imagesPerFile) { this.imagesPerFile = imagesPerFile; } public Hashtable<Integer, Vector<String>> getFileList() { return fileList; } public void setFileList(final Hashtable<Integer, Vector<String>> fileList) { this.fileList = fileList; } public String getDate() { return date; } public void setDate(final String date) { this.date = date; } public String getTime() { return time; } public void setTime(final String time) { this.time = time; } public String getImageType() { return imageType; } public void setImageType(final String imageType) { this.imageType = imageType; } public String getOriginalDate() { return originalDate; } public void setOriginalDate(final String originalDate) { this.originalDate = originalDate; } public String getOriginalTime() { return originalTime; } public void setOriginalTime(final String originalTime) { this.originalTime = originalTime; } public String getOriginalInstance() { return originalInstance; } public void setOriginalInstance(final String originalInstance) { this.originalInstance = originalInstance; } public int getOriginalSeries() { return originalSeries; } public void setOriginalSeries(final int originalSeries) { this.originalSeries = originalSeries; } public Vector<String> getCompanionFiles() { return companionFiles; } public void setCompanionFiles(final Vector<String> companionFiles) { this.companionFiles = companionFiles; } // -- ColorTable API Methods -- @Override public ColorTable getColorTable(final int imageIndex, final long planeIndex) { final int pixelType = get(0).getPixelType(); switch (pixelType) { case FormatTools.INT8: case FormatTools.UINT8: if (lut != null && lut8 == null) lut8 = new ColorTable8(lut); return lut8; case FormatTools.INT16: case FormatTools.UINT16: if (shortLut != null && lut16 == null) lut16 = new ColorTable16(shortLut); return lut16; } return null; } // -- Metadata API Methods -- @Override public void populateImageMetadata() { log().info("Populating metadata"); // TODO this isn't going to work because each parsing will // get the same filelist size and repeat infinitely final int seriesCount = fileList.size(); final Integer[] keys = fileList.keySet().toArray(new Integer[0]); Arrays.sort(keys); for (int i = 0; i < seriesCount; i++) { get(i).setAxisTypes(Axes.X, Axes.Y); int sizeZ = 0; if (seriesCount == 1) { sizeZ = getOffsets().length * 
fileList.get(keys[i]).size(); get(i).setMetadataComplete(true); get(i).setFalseColor(false); if (isRLE) { get(i).setAxisTypes(Axes.X, Axes.Y, Axes.CHANNEL); } if (get(i).getAxisLength(Axes.CHANNEL) > 1) { get(i).setPlanarAxisCount(3); } else { get(i).setPlanarAxisCount(2); } } else { try { final Parser p = (Parser) getFormat().createParser(); final Metadata m = p.parse(fileList.get(keys[i]).get(0), new SCIFIOConfig() .groupableSetGroupFiles(false)); add(m.get(0)); sizeZ *= fileList.get(keys[i]).size(); } catch (final IOException e) { log().error("Error creating Metadata from DICOM companion files.", e); } catch (final FormatException e) { log().error("Error creating Metadata from DICOM companion files.", e); } } get(i).setAxisLength(Axes.Z, sizeZ); } } // -- HasSource API Methods -- @Override public void close(final boolean fileOnly) throws IOException { super.close(fileOnly); if (!fileOnly) { oddLocations = false; isJPEG = isJP2K = isRLE = isDeflate = false; lut = null; offsets = null; shortLut = null; maxPixelValue = 0; rescaleSlope = 1.0; rescaleIntercept = 0.0; pixelSizeX = pixelSizeY = null; pixelSizeZ = null; imagesPerFile = 0; fileList = null; inverted = false; date = time = imageType = null; originalDate = originalTime = originalInstance = null; originalSeries = 0; // TODO the resetting is a bit too aggressive, perhaps it should just // clear out fields.. // companionFiles.clear(); } } } public static class Checker extends AbstractChecker { // -- Constants -- private static final String[] DICOM_SUFFIXES = { "dic", "dcm", "dicom", "j2ki", "j2kr" }; // -- Checker API Methods -- @Override public boolean suffixNecessary() { return false; } @Override public boolean suffixSufficient() { return false; } @Override public boolean isFormat(final String name, final SCIFIOConfig config) { // extension is sufficient as long as it is DIC, DCM, DICOM, J2KI, or J2KR if (FormatTools.checkSuffix(name, DICOM_SUFFIXES)) return true; return super.isFormat(name, config); } @Override public boolean isFormat(final RandomAccessInputStream stream) throws IOException { final int blockLen = 2048; if (!FormatTools.validStream(stream, blockLen, true)) return false; stream.seek(128); if (stream.readString(4).equals(DICOM_MAGIC_STRING)) return true; stream.seek(0); try { final int tag = DICOMUtils.getNextTag(stream).get(); return TYPES.get(tag) != null; } catch (final NullPointerException e) {} catch (final FormatException e) {} return false; } } public static class Parser extends AbstractParser<Metadata> { // -- Constants -- private static final int PIXEL_REPRESENTATION = 0x00280103; private static final int PIXEL_SIGN = 0x00281041; private static final int TRANSFER_SYNTAX_UID = 0x00020010; private static final int SLICE_SPACING = 0x00180088; private static final int SAMPLES_PER_PIXEL = 0x00280002; private static final int PHOTOMETRIC_INTERPRETATION = 0x00280004; private static final int PLANAR_CONFIGURATION = 0x00280006; private static final int NUMBER_OF_FRAMES = 0x00280008; private static final int ROWS = 0x00280010; private static final int COLUMNS = 0x00280011; private static final int PIXEL_SPACING = 0x00280030; private static final int BITS_ALLOCATED = 0x00280100; private static final int WINDOW_CENTER = 0x00281050; private static final int WINDOW_WIDTH = 0x00281051; private static final int RESCALE_INTERCEPT = 0x00281052; private static final int RESCALE_SLOPE = 0x00281053; private static final int ICON_IMAGE_SEQUENCE = 0x00880200; private static final int ITEM = 0xFFFEE000; private static final int 
ITEM_DELIMINATION = 0xFFFEE00D; private static final int SEQUENCE_DELIMINATION = 0xFFFEE0DD; private static final int PIXEL_DATA = 0x7FE00010; @Parameter private CodecService codecService; // -- Parser API Methods -- @Override public int fileGroupOption(final String id) throws FormatException, IOException { return FormatTools.CAN_GROUP; } @Override protected void typedParse(final RandomAccessInputStream stream, final Metadata meta, final SCIFIOConfig config) throws IOException, FormatException { meta.createImageMetadata(1); stream.order(true); final ImageMetadata iMeta = meta.get(0); // look for companion files final Vector<String> companionFiles = new Vector<String>(); attachCompanionFiles(companionFiles); meta.setCompanionFiles(companionFiles); int location = 0; boolean isJP2K = false; boolean isJPEG = false; boolean isRLE = false; boolean isDeflate = false; boolean oddLocations = false; int maxPixelValue = -1; int imagesPerFile = 0; boolean bigEndianTransferSyntax = false; long[] offsets = null; int sizeX = 0; int sizeY = 0; int bitsPerPixel = 0; boolean interleaved; // some DICOM files have a 128 byte header followed by a 4 byte identifier log().info("Verifying DICOM format"); final MetadataLevel level = config.parserGetLevel(); getSource().seek(128); if (getSource().readString(4).equals("DICM")) { if (level != MetadataLevel.MINIMUM) { // header exists, so we'll read it getSource().seek(0); meta.getTable() .put("Header information", getSource().readString(128)); getSource().skipBytes(4); } location = 128; } else getSource().seek(0); log().info("Reading tags"); long baseOffset = 0; boolean decodingTags = true; boolean signed = false; while (decodingTags) { if (getSource().getFilePointer() + 4 >= getSource().length()) { break; } log().debug("Reading tag from " + getSource().getFilePointer()); final DICOMTag tag = DICOMUtils.getNextTag(getSource(), bigEndianTransferSyntax, oddLocations); iMeta.setLittleEndian(tag.isLittleEndian()); if (tag.getElementLength() <= 0) continue; oddLocations = (location & 1) != 0; log().debug( " tag=" + tag.get() + " len=" + tag.getElementLength() + " fp=" + getSource().getFilePointer()); String s = null; switch (tag.get()) { case TRANSFER_SYNTAX_UID: // this tag can indicate which compression scheme is used s = getSource().readString(tag.getElementLength()); addInfo(meta, tag, s); if (s.startsWith("1.2.840.10008.1.2.4.9")) isJP2K = true; else if (s.startsWith("1.2.840.10008.1.2.4")) isJPEG = true; else if (s.startsWith("1.2.840.10008.1.2.5")) isRLE = true; else if (s.equals("1.2.8.10008.1.2.1.99")) isDeflate = true; else if (s.contains("1.2.4") || s.contains("1.2.5")) { throw new UnsupportedCompressionException( "Sorry, compression type " + s + " not supported"); } if (s.contains("1.2.840.10008.1.2.2")) { bigEndianTransferSyntax = true; } break; case NUMBER_OF_FRAMES: s = getSource().readString(tag.getElementLength()); addInfo(meta, tag, s); final double frames = Double.parseDouble(s); if (frames > 1.0) imagesPerFile = (int) frames; break; case SAMPLES_PER_PIXEL: addInfo(meta, tag, getSource().readShort()); break; case PLANAR_CONFIGURATION: final int configuration = getSource().readShort(); interleaved = configuration == 0; if (interleaved) { iMeta.setAxisTypes(Axes.CHANNEL, Axes.X, Axes.Y); iMeta.setPlanarAxisCount(3); } addInfo(meta, tag, configuration); break; case ROWS: if (sizeY == 0) { sizeY = getSource().readShort(); iMeta.addAxis(Axes.Y, sizeY); } else getSource().skipBytes(2); addInfo(meta, tag, sizeY); break; case COLUMNS: if (sizeX == 0) { sizeX = 
getSource().readShort(); iMeta.addAxis(Axes.X, sizeX); } else getSource().skipBytes(2); addInfo(meta, tag, sizeX); break; case PHOTOMETRIC_INTERPRETATION: case PIXEL_SPACING: case SLICE_SPACING: case RESCALE_INTERCEPT: case WINDOW_CENTER: case RESCALE_SLOPE: addInfo(meta, tag, getSource().readString(tag.getElementLength())); break; case BITS_ALLOCATED: if (bitsPerPixel == 0) bitsPerPixel = getSource().readShort(); else getSource().skipBytes(2); addInfo(meta, tag, bitsPerPixel); break; case PIXEL_REPRESENTATION: case PIXEL_SIGN: final short ss = getSource().readShort(); signed = ss == 1; addInfo(meta, tag, ss); break; case 537262910: case WINDOW_WIDTH: final String t = getSource().readString(tag.getElementLength()); if (t.trim().length() == 0) maxPixelValue = -1; else { try { maxPixelValue = new Double(t.trim()).intValue(); } catch (final NumberFormatException e) { maxPixelValue = -1; } } addInfo(meta, tag, t); break; case PIXEL_DATA: case ITEM: case 0xffee000: if (tag.getElementLength() != 0) { baseOffset = getSource().getFilePointer(); addInfo(meta, tag, location); decodingTags = false; } else addInfo(meta, tag, null); break; case 0x7f880010: if (tag.getElementLength() != 0) { baseOffset = location + 4; decodingTags = false; } break; case 0x7fe00000: getSource().skipBytes(tag.getElementLength()); break; case 0: getSource().seek(getSource().getFilePointer() - 4); break; default: final long oldfp = getSource().getFilePointer(); addInfo(meta, tag, s); getSource().seek(oldfp + tag.getElementLength()); } if (getSource().getFilePointer() >= (getSource().length() - 4)) { decodingTags = false; } } if (imagesPerFile == 0) imagesPerFile = 1; int bpp = bitsPerPixel; while (bitsPerPixel % 8 != 0) bitsPerPixel++; if (bitsPerPixel == 24 || bitsPerPixel == 48) { bitsPerPixel /= 3; bpp /= 3; } final int pixelType = FormatTools.pixelTypeFromBytes(bitsPerPixel / 8, signed, false); iMeta.setBitsPerPixel(bpp); iMeta.setPixelType(pixelType); final int bytesPerPixel = FormatTools.getBytesPerPixel(pixelType); final int planeSize = sizeX * sizeY * (int) (meta.getColorTable(0, 0) == null ? meta.get(0).getAxisLength( Axes.CHANNEL) : 1) * bytesPerPixel; meta.setJP2K(isJP2K); meta.setJPEG(isJPEG); meta.setImagesPerFile(imagesPerFile); meta.setRLE(isRLE); meta.setDeflate(isDeflate); meta.setMaxPixelValue(maxPixelValue); meta.setOddLocations(oddLocations); log().info("Calculating image offsets"); // calculate the offset to each plane getSource().seek(baseOffset - 12); final int len = getSource().readInt(); if (len >= 0 && len + getSource().getFilePointer() < getSource().length()) { getSource().skipBytes(len); final int check = getSource().readShort() & 0xffff; if (check == 0xfffe) { baseOffset = getSource().getFilePointer() + 2; } } offsets = new long[imagesPerFile]; meta.setOffsets(offsets); for (int i = 0; i < imagesPerFile; i++) { if (isRLE) { if (i == 0) getSource().seek(baseOffset); else { getSource().seek(offsets[i - 1]); final CodecOptions options = new CodecOptions(); options.maxBytes = planeSize / bytesPerPixel; for (int q = 0; q < bytesPerPixel; q++) { final PackbitsCodec codec = codecService.getCodec(PackbitsCodec.class); codec.decompress(getSource(), options); while (getSource().read() == 0) { /* Read to non-0 data */} getSource().seek(getSource().getFilePointer() - 1); } } getSource().skipBytes(i == 0 ? 
64 : 53); while (getSource().read() == 0) { /* Read to non-0 data */} offsets[i] = getSource().getFilePointer() - 1; } else if (isJPEG || isJP2K) { // scan for next JPEG magic byte sequence if (i == 0) offsets[i] = baseOffset; else offsets[i] = offsets[i - 1] + 3; final byte secondCheck = isJPEG ? (byte) 0xd8 : (byte) 0x4f; getSource().seek(offsets[i]); final byte[] buf = new byte[8192]; int n = getSource().read(buf); boolean found = false; while (!found) { for (int q = 0; q < n - 2; q++) { if (buf[q] == (byte) 0xff && buf[q + 1] == secondCheck && buf[q + 2] == (byte) 0xff) { if (isJPEG || (isJP2K && buf[q + 3] == 0x51)) { found = true; offsets[i] = getSource().getFilePointer() + q - n; break; } } } if (!found) { for (int q = 0; q < 4; q++) { buf[q] = buf[buf.length + q - 4]; } n = getSource().read(buf, 4, buf.length - 4) + 4; } } } else offsets[i] = baseOffset + planeSize * i; } makeFileList(config); } @Override public String[] getImageUsedFiles(final int imageIndex, final boolean noPixels) { FormatTools.assertId(getSource(), true, 1); if (noPixels || getMetadata().getFileList() == null) return null; final Integer[] keys = getMetadata().getFileList().keySet().toArray(new Integer[0]); Arrays.sort(keys); final Vector<String> files = getMetadata().getFileList().get(keys[imageIndex]); for (final String f : getMetadata().getCompanionFiles()) { files.add(f); } return files == null ? null : files.toArray(new String[files.size()]); } // -- Helper methods -- private void makeFileList(final SCIFIOConfig config) throws FormatException, IOException { log().info("Building file list"); if (getMetadata().getFileList() == null && getMetadata().getOriginalInstance() != null && getMetadata().getOriginalDate() != null && getMetadata().getOriginalTime() != null && config.groupableIsGroupFiles()) { final Hashtable<Integer, Vector<String>> fileList = new Hashtable<Integer, Vector<String>>(); final Integer s = new Integer(getMetadata().getOriginalSeries()); fileList.put(s, new Vector<String>()); final int instanceNumber = Integer.parseInt(getMetadata().getOriginalInstance()) - 1; if (instanceNumber == 0) fileList.get(s).add(getSource().getFileName()); else { while (instanceNumber > fileList.get(s).size()) { fileList.get(s).add(null); } fileList.get(s).add(getSource().getFileName()); } // look for matching files in the current directory final Location currentFile = new Location(getContext(), getSource().getFileName()) .getAbsoluteFile(); Location directory = currentFile.getParentFile(); scanDirectory(directory, false); // move up a directory and look for other directories that // could contain matching files directory = directory.getParentFile(); final String[] subdirs = directory.list(true); if (subdirs != null) { for (final String subdir : subdirs) { final Location f = new Location(getContext(), directory, subdir).getAbsoluteFile(); if (!f.isDirectory()) continue; scanDirectory(f, true); } } final Integer[] keys = fileList.keySet().toArray(new Integer[0]); Arrays.sort(keys); for (final Integer key : keys) { for (int j = 0; j < fileList.get(key).size(); j++) { if (fileList.get(key).get(j) == null) { fileList.get(key).remove(j); j--; } } } getMetadata().setFileList(fileList); } else if (getMetadata().getFileList() == null) { final Hashtable<Integer, Vector<String>> fileList = new Hashtable<Integer, Vector<String>>(); fileList.put(0, new Vector<String>()); fileList.get(0).add(getSource().getFileName()); getMetadata().setFileList(fileList); } } /** * DICOM datasets produced by: * 
http://www.ct-imaging.de/index.php/en/ct-systeme-e/mikro-ct-e.html * contain a bunch of extra metadata and log files. We do not parse these * extra files, but do locate and attach them to the DICOM file(s). */ private void attachCompanionFiles(final Vector<String> companionFiles) { final Location parent = new Location(getContext(), getSource().getFileName()).getAbsoluteFile() .getParentFile(); final Location grandparent = parent.getParentFile(); if (new Location(getContext(), grandparent, parent.getName() + ".mif") .exists()) { final String[] list = grandparent.list(true); for (final String f : list) { final Location file = new Location(getContext(), grandparent, f); if (!file.isDirectory()) { companionFiles.add(file.getAbsolutePath()); } } } } /** * Scan the given directory for files that belong to this dataset. */ private void scanDirectory(final Location dir, final boolean checkSeries) throws FormatException, IOException { final Location currentFile = new Location(getContext(), getSource().getFileName()).getAbsoluteFile(); final FilePattern pattern = new FilePattern(getContext(), currentFile.getName(), dir .getAbsolutePath()); String[] patternFiles = pattern.getFiles(); if (patternFiles == null) patternFiles = new String[0]; Arrays.sort(patternFiles); final String[] files = dir.list(true); if (files == null) return; Arrays.sort(files); for (final String f : files) { final String file = new Location(getContext(), dir, f).getAbsolutePath(); log().debug("Checking file " + file); if (!f.equals(getSource().getFileName()) && !file.equals(getSource().getFileName()) && getFormat().createChecker().isFormat(file) && Arrays.binarySearch(patternFiles, file) >= 0) { addFileToList(file, checkSeries); } } } /** * Determine if the given file belongs in the same dataset as this file. 
*/ private void addFileToList(final String file, final boolean checkSeries) throws FormatException, IOException { final RandomAccessInputStream stream = new RandomAccessInputStream(getContext(), file); if (!getFormat().createChecker().isFormat(stream)) { stream.close(); return; } stream.order(true); stream.seek(128); if (!stream.readString(4).equals("DICM")) stream.seek(0); int fileSeries = -1; String date = null, time = null, instance = null; while (date == null || time == null || instance == null || (checkSeries && fileSeries < 0)) { final long fp = stream.getFilePointer(); if (fp + 4 >= stream.length() || fp < 0) break; final DICOMTag tag = DICOMUtils.getNextTag(stream); final String key = TYPES.get(new Integer(tag.get())); if ("Instance Number".equals(key)) { instance = stream.readString(tag.getElementLength()).trim(); if (instance.length() == 0) instance = null; } else if ("Acquisition Time".equals(key)) { time = stream.readString(tag.getElementLength()); } else if ("Acquisition Date".equals(key)) { date = stream.readString(tag.getElementLength()); } else if ("Series Number".equals(key)) { fileSeries = Integer.parseInt(stream.readString(tag.getElementLength()).trim()); } else stream.skipBytes(tag.getElementLength()); } stream.close(); if (date == null || time == null || instance == null || (checkSeries && fileSeries == getMetadata().getOriginalSeries())) { return; } int stamp = 0; try { stamp = Integer.parseInt(time); } catch (final NumberFormatException e) {} int timestamp = 0; try { timestamp = Integer.parseInt(getMetadata().getOriginalTime()); } catch (final NumberFormatException e) {} if (date.equals(getMetadata().getOriginalDate()) && (Math.abs(stamp - timestamp) < 150)) { int position = Integer.parseInt(instance) - 1; if (position < 0) position = 0; final Hashtable<Integer, Vector<String>> fileList = getMetadata().getFileList(); if (fileList.get(fileSeries) == null) { fileList.put(fileSeries, new Vector<String>()); } if (position < fileList.get(fileSeries).size()) { while (position < fileList.get(fileSeries).size() && fileList.get(fileSeries).get(position) != null) { position++; } if (position < fileList.get(fileSeries).size()) { fileList.get(fileSeries).setElementAt(file, position); } else fileList.get(fileSeries).add(file); } else { while (position > fileList.get(fileSeries).size()) { fileList.get(fileSeries).add(null); } fileList.get(fileSeries).add(file); } } } private void addInfo(final Metadata meta, final DICOMTag tag, final String value) throws IOException { final String oldValue = value; String info = getHeaderInfo(tag, value); if (info != null && tag.get() != ITEM) { info = info.trim(); if (info.equals("")) info = oldValue == null ? 
"" : oldValue.trim(); String key = TYPES.get(tag.get()); if (key == null) { key = formatTag(tag.get()); } if (key.equals("Samples per pixel")) { final int sizeC = Integer.parseInt(info); if (sizeC > 1) { meta.get(0).setAxisLength(Axes.CHANNEL, sizeC); meta.get(0).setPlanarAxisCount(2); } } else if (key.equals("Photometric Interpretation")) { if (info.equals("PALETTE COLOR")) { meta.get(0).setIndexed(true); meta.get(0).setAxisLength(Axes.CHANNEL, 1); meta.lut = new byte[3][]; meta.shortLut = new short[3][]; } else if (info.startsWith("MONOCHROME")) { meta.setInverted(info.endsWith("1")); } } else if (key.equals("Acquisition Date")) meta.setOriginalDate(info); else if (key.equals("Acquisition Time")) meta.setOriginalTime(info); else if (key.equals("Instance Number")) { if (info.trim().length() > 0) { meta.setOriginalInstance(info); } } else if (key.equals("Series Number")) { try { meta.setOriginalSeries(Integer.parseInt(info)); } catch (final NumberFormatException e) {} } else if (key.contains("Palette Color LUT Data")) { final String color = key.substring(0, key.indexOf(" ")).trim(); final int ndx = color.equals("Red") ? 0 : color.equals("Green") ? 1 : 2; final long fp = getSource().getFilePointer(); getSource().seek( getSource().getFilePointer() - tag.getElementLength() + 1); meta.shortLut[ndx] = new short[tag.getElementLength() / 2]; meta.lut[ndx] = new byte[tag.getElementLength() / 2]; for (int i = 0; i < meta.lut[ndx].length; i++) { meta.shortLut[ndx][i] = getSource().readShort(); meta.lut[ndx][i] = (byte) (meta.shortLut[ndx][i] & 0xff); } getSource().seek(fp); } else if (key.equals("Content Time")) meta.setTime(info); else if (key.equals("Content Date")) meta.setDate(info); else if (key.equals("Image Type")) meta.setImageType(info); else if (key.equals("Rescale Intercept")) { meta.setRescaleIntercept(Double.parseDouble(info)); } else if (key.equals("Rescale Slope")) { meta.setRescaleSlope(Double.parseDouble(info)); } else if (key.equals("Pixel Spacing")) { meta.setPixelSizeX(info.substring(0, info.indexOf("\\"))); meta.setPixelSizeY(info.substring(info.lastIndexOf("\\") + 1)); } else if (key.equals("Spacing Between Slices")) { meta.setPixelSizeZ(new Double(info)); } if (((tag.get() & 0xffff0000) >> 16) != 0x7fe0) { key = formatTag(tag.get()) + " " + key; final int imageIndex = meta.getImageCount() - 1; Object v; if ((v = meta.get(imageIndex).getTable().get(key)) != null) { // make sure that values are not overwritten meta.get(imageIndex).getTable().remove(key); meta.get(imageIndex).getTable().putList(key, v); meta.get(imageIndex).getTable().putList(key, info); } else { meta.get(imageIndex).getTable().put(key, info); } } } } private String formatTag(final int tag) { String s = Integer.toHexString(tag); while (s.length() < 8) { s = "0" + s; } return s.substring(0, 4) + "," + s.substring(4); } private void addInfo(final Metadata meta, final DICOMTag tag, final int value) throws IOException { addInfo(meta, tag, Integer.toString(value)); } private String getHeaderInfo(final DICOMTag tag, String value) throws IOException { if (tag.get() == ITEM_DELIMINATION || tag.get() == SEQUENCE_DELIMINATION) { tag.setInSequence(false); } String id = TYPES.get(new Integer(tag.get())); int vr = tag.getVR(); if (id != null) { if (vr == DICOMUtils.IMPLICIT_VR) { vr = (id.charAt(0) << 8) + id.charAt(1); tag.setVR(vr); } if (id.length() > 2) id = id.substring(2); } if (tag.get() == ITEM) return id != null ? 
id : null; if (value != null) return value; boolean skip = false; switch (vr) { case DICOMUtils.AE: case DICOMUtils.AS: case DICOMUtils.CS: case DICOMUtils.DA: case DICOMUtils.DS: case DICOMUtils.DT: case DICOMUtils.IS: case DICOMUtils.LO: case DICOMUtils.LT: case DICOMUtils.PN: case DICOMUtils.SH: case DICOMUtils.ST: case DICOMUtils.TM: case DICOMUtils.UI: value = getSource().readString(tag.getElementLength()); break; case DICOMUtils.AT: // Cannot fix element length to 4, because AT value representation is // always // 4 bytes long (DICOM specs PS3.5 §6.2), but value multiplicity is // 1-n byte[] bytes = new byte[tag.getElementLength()]; // Read from stream getSource().readFully(bytes); // If little endian, swap bytes to get a string with a user friendly // representation of tag group and tag element if (tag.littleEndian) { for (int i = 0; i < bytes.length / 2; ++i) { byte t = bytes[2 * i]; bytes[2 * i] = bytes[2 * i + 1]; bytes[2 * i + 1] = t; } } // Convert the bytes to a string value = DataTools.bytesToHex(bytes); break; case DICOMUtils.US: if (tag.getElementLength() == 2) value = Integer.toString(getSource().readShort()); else { value = ""; final int n = tag.getElementLength() / 2; for (int i = 0; i < n; i++) { value += Integer.toString(getSource().readShort()) + " "; } } break; case DICOMUtils.IMPLICIT_VR: value = getSource().readString(tag.getElementLength()); if (tag.getElementLength() <= 4 || tag.getElementLength() > 44) value = null; break; case DICOMUtils.SQ: value = ""; final boolean privateTag = ((tag.getElementLength() >> 16) & 1) != 0; if (tag.get() == ICON_IMAGE_SEQUENCE || privateTag) skip = true; break; default: skip = true; } if (skip) { final long skipCount = tag.getElementLength(); if (getSource().getFilePointer() + skipCount <= getSource().length()) { getSource().skipBytes((int) skipCount); } tag.addLocation(tag.getElementLength()); value = ""; } if (value != null && id == null && !value.equals("")) return value; else if (id == null) return null; else return value; } } public static class Reader extends ByteArrayReader<Metadata> { @Parameter private InitializeService initializeService; @Parameter private CodecService codecService; // -- AbstractReader API Methods -- @Override protected String[] createDomainArray() { return new String[] { FormatTools.MEDICAL_DOMAIN }; } // -- Reader API Methods -- @Override public boolean hasCompanionFiles() { return true; } @Override public ByteArrayPlane openPlane(final int imageIndex, long planeIndex, final ByteArrayPlane plane, final long[] planeMin, final long[] planeMax, final SCIFIOConfig config) throws FormatException, IOException { final Metadata meta = getMetadata(); plane.setColorTable(meta.getColorTable(imageIndex, planeIndex)); FormatTools.checkPlaneForReading(meta, imageIndex, planeIndex, plane .getData().length, planeMin, planeMax); final int xAxis = meta.get(imageIndex).getAxisIndex(Axes.X); final int yAxis = meta.get(imageIndex).getAxisIndex(Axes.Y); final int x = (int) planeMin[xAxis], y = (int) planeMin[yAxis], w = (int) planeMax[xAxis], h = (int) planeMax[yAxis]; final Hashtable<Integer, Vector<String>> fileList = meta.getFileList(); final Integer[] keys = fileList.keySet().toArray(new Integer[0]); Arrays.sort(keys); if (fileList.get(keys[imageIndex]).size() > 1) { final int fileNumber = (int) (planeIndex / meta.getImagesPerFile()); planeIndex = planeIndex % meta.getImagesPerFile(); final String file = fileList.get(keys[imageIndex]).get(fileNumber); final io.scif.Reader r = initializeService.initializeReader(file); 
return (ByteArrayPlane) r.openPlane(imageIndex, planeIndex, plane, planeMin, planeMax, config); } final int ec = meta.get(0).isIndexed() ? 1 : (int) meta.get(imageIndex).getAxisLength( Axes.CHANNEL); final int bpp = FormatTools.getBytesPerPixel(meta.get(imageIndex).getPixelType()); final int bytes = (int) (meta.get(imageIndex).getAxisLength(Axes.X) * meta.get(imageIndex).getAxisLength(Axes.Y) * bpp * ec); getStream().seek(meta.getOffsets()[(int) planeIndex]); if (meta.isRLE()) { // plane is compressed using run-length encoding final CodecOptions options = new CodecOptions(); options.maxBytes = (int) (meta.get(imageIndex).getAxisLength(Axes.X) * meta.get( imageIndex).getAxisLength(Axes.Y)); final PackbitsCodec codec = codecService.getCodec(PackbitsCodec.class); for (int c = 0; c < ec; c++) { byte[] t = null; if (bpp > 1) { // TODO unused int planeSize = bytes / (bpp * ec); final byte[][] tmp = new byte[bpp][]; for (int i = 0; i < bpp; i++) { tmp[i] = codec.decompress(getStream(), options); if (planeIndex < meta.getImagesPerFile() - 1 || i < bpp - 1) { while (getStream().read() == 0) { /* Read to non-0 data */} getStream().seek(getStream().getFilePointer() - 1); } } t = new byte[bytes / ec]; for (int i = 0; i < planeIndex; i++) { for (int j = 0; j < bpp; j++) { final int byteIndex = meta.get(imageIndex).isLittleEndian() ? bpp - j - 1 : j; if (i < tmp[byteIndex].length) { t[i * bpp + j] = tmp[byteIndex][i]; } } } } else { t = codec.decompress(getStream(), options); if (t.length < (bytes / ec)) { final byte[] tmp = t; t = new byte[bytes / ec]; System.arraycopy(tmp, 0, t, 0, tmp.length); } if (planeIndex < meta.getImagesPerFile() - 1 || c < ec - 1) { while (getStream().read() == 0) { /* Read to non-0 data */} getStream().seek(getStream().getFilePointer() - 1); } } final int rowLen = w * bpp; final int srcRowLen = (int) meta.get(imageIndex).getAxisLength(Axes.X) * bpp; // TODO unused int srcPlane = meta.getAxisLength(imageIndex, Axes.Y) * // srcRowLen; for (int row = 0; row < h; row++) { final int src = (row + y) * srcRowLen + x * bpp; final int dest = (h * c + row) * rowLen; final int len = Math.min(rowLen, t.length - src - 1); if (len < 0) break; System.arraycopy(t, src, plane.getBytes(), dest, len); } } } else if (meta.isJPEG() || meta.isJP2K()) { // plane is compressed using JPEG or JPEG-2000 final long end = planeIndex < meta.getOffsets().length - 1 ? meta.getOffsets()[(int) planeIndex + 1] : getStream().length(); byte[] b = new byte[(int) (end - getStream().getFilePointer())]; getStream().read(b); if (b[2] != (byte) 0xff) { final byte[] tmp = new byte[b.length + 1]; tmp[0] = b[0]; tmp[1] = b[1]; tmp[2] = (byte) 0xff; System.arraycopy(b, 2, tmp, 3, b.length - 2); b = tmp; } if ((b[3] & 0xff) >= 0xf0) { b[3] -= (byte) 0x30; } int pt = b.length - 2; while (pt >= 0 && b[pt] != (byte) 0xff || b[pt + 1] != (byte) 0xd9) { pt--; } if (pt < b.length - 2) { final byte[] tmp = b; b = new byte[pt + 2]; System.arraycopy(tmp, 0, b, 0, b.length); } final CodecOptions options = new CodecOptions(); options.littleEndian = meta.get(imageIndex).isLittleEndian(); options.interleaved = meta.get(imageIndex).getInterleavedAxisCount() > 0; final Codec codec = codecService.getCodec(meta.isJPEG() ? 
JPEGCodec.class : JPEG2000Codec.class); b = codec.decompress(b, options); final int rowLen = w * bpp; final int srcRowLen = (int) meta.get(imageIndex).getAxisLength(Axes.X) * bpp; final int srcPlane = (int) meta.get(imageIndex).getAxisLength(Axes.Y) * srcRowLen; for (int c = 0; c < ec; c++) { for (int row = 0; row < h; row++) { System.arraycopy(b, c * srcPlane + (row + y) * srcRowLen + x * bpp, plane.getBytes(), h * rowLen * c + row * rowLen, rowLen); } } } else if (meta.isDeflate()) { // TODO throw new UnsupportedCompressionException( "Deflate data is not supported."); } else { // plane is not compressed readPlane(getStream(), imageIndex, planeMin, planeMax, plane); } if (meta.isInverted()) { // pixels are stored such that white -> 0; invert the values so that // white -> 255 (or 65535) if (bpp == 1) { for (int i = 0; i < plane.getBytes().length; i++) { plane.getBytes()[i] = (byte) (255 - plane.getBytes()[i]); } } else if (bpp == 2) { if (meta.getMaxPixelValue() == -1) meta.setMaxPixelValue(65535); final boolean little = meta.get(imageIndex).isLittleEndian(); for (int i = 0; i < plane.getBytes().length; i += 2) { final short s = DataTools.bytesToShort(plane.getBytes(), i, 2, little); DataTools.unpackBytes(meta.getMaxPixelValue() - s, plane.getBytes(), i, 2, little); } } } // NB: do *not* apply the rescale function return plane; } } // -- DICOM Helper Classes -- private static class DICOMUtils { private static final int AE = 0x4145, AS = 0x4153, AT = 0x4154, CS = 0x4353; private static final int DA = 0x4441, DS = 0x4453, DT = 0x4454, FD = 0x4644; private static final int FL = 0x464C, IS = 0x4953, LO = 0x4C4F, LT = 0x4C54; private static final int PN = 0x504E, SH = 0x5348, SL = 0x534C, SS = 0x5353; private static final int ST = 0x5354, TM = 0x544D, UI = 0x5549, UL = 0x554C; private static final int US = 0x5553, UT = 0x5554, OB = 0x4F42, OW = 0x4F57; private static final int SQ = 0x5351, UN = 0x554E, QQ = 0x3F3F; private static final int IMPLICIT_VR = 0x2d2d; private static DICOMTag getNextTag(final RandomAccessInputStream stream) throws FormatException, IOException { return getNextTag(stream, false); } private static DICOMTag getNextTag(final RandomAccessInputStream stream, final boolean bigEndianTransferSyntax) throws FormatException, IOException { return getNextTag(stream, bigEndianTransferSyntax, false); } private static DICOMTag getNextTag(final RandomAccessInputStream stream, final boolean bigEndianTransferSyntax, final boolean isOddLocations) throws FormatException, IOException { final long fp = stream.getFilePointer(); int groupWord = stream.readShort() & 0xffff; final DICOMTag diTag = new DICOMTag(); boolean littleEndian = true; if (groupWord == 0x0800 && bigEndianTransferSyntax) { littleEndian = false; groupWord = 0x0008; stream.order(false); } else if (groupWord == 0xfeff || groupWord == 0xfffe) { stream.skipBytes(6); return DICOMUtils.getNextTag(stream, bigEndianTransferSyntax); } int elementWord = stream.readShort(); int tag = ((groupWord << 16) & 0xffff0000) | (elementWord & 0xffff); diTag.setElementLength(getLength(stream, diTag)); if (diTag.getElementLength() > stream.length()) { stream.seek(fp); littleEndian = !littleEndian; stream.order(littleEndian); groupWord = stream.readShort() & 0xffff; elementWord = stream.readShort(); tag = ((groupWord << 16) & 0xffff0000) | (elementWord & 0xffff); diTag.setElementLength(getLength(stream, diTag)); if (diTag.getElementLength() > stream.length()) { throw new FormatException("Invalid tag length " + diTag.getElementLength()); } 
diTag.setTagValue(tag); return diTag; } if (diTag.getElementLength() < 0 && groupWord == 0x7fe0) { stream.skipBytes(12); diTag.setElementLength(stream.readInt()); if (diTag.getElementLength() < 0) diTag.setElementLength(stream .readInt()); } if (diTag.getElementLength() == 0 && (groupWord == 0x7fe0 || tag == 0x291014)) { diTag.setElementLength(getLength(stream, diTag)); } else if (diTag.getElementLength() == 0) { stream.seek(stream.getFilePointer() - 4); final String v = stream.readString(2); if (v.equals("UT")) { stream.skipBytes(2); diTag.setElementLength(stream.readInt()); } else stream.skipBytes(2); } // HACK - needed to read some GE files // The element length must be even! if (!isOddLocations && (diTag.getElementLength() % 2) == 1) diTag .incrementElementLength(); // "Undefined" element length. // This is a sort of bracket that encloses a sequence of elements. if (diTag.getElementLength() == -1) { diTag.setElementLength(0); diTag.setInSequence(true); } diTag.setTagValue(tag); diTag.setLittleEndian(littleEndian); return diTag; } private static int getLength(final RandomAccessInputStream stream, final DICOMTag tag) throws IOException { final byte[] b = new byte[4]; stream.read(b); // We cannot know whether the VR is implicit or explicit // without the full DICOM Data Dictionary for public and // private groups. // We will assume the VR is explicit if the two bytes // match the known codes. It is possible that these two // bytes are part of a 32-bit length for an implicit VR. final int vr = ((b[0] & 0xff) << 8) | (b[1] & 0xff); tag.setVR(vr); switch (vr) { case OB: case OW: case SQ: case UN: // Explicit VR with 32-bit length if other two bytes are zero if ((b[2] == 0) || (b[3] == 0)) { return stream.readInt(); } tag.setVR(IMPLICIT_VR); return DataTools.bytesToInt(b, stream.isLittleEndian()); case AE: case AS: case AT: case CS: case DA: case DS: case DT: case FD: case FL: case IS: case LO: case LT: case PN: case SH: case SL: case SS: case ST: case TM: case UI: case UL: case US: case UT: case QQ: // Explicit VR with 16-bit length if (tag.get() == 0x00283006) { return DataTools.bytesToInt(b, 2, 2, stream.isLittleEndian()); } int n1 = DataTools.bytesToShort(b, 2, 2, stream.isLittleEndian()); int n2 = DataTools.bytesToShort(b, 2, 2, !stream.isLittleEndian()); n1 &= 0xffff; n2 &= 0xffff; if (n1 < 0 || n1 + stream.getFilePointer() > stream.length()) return n2; if (n2 < 0 || n2 + stream.getFilePointer() > stream.length()) return n1; return n1; case 0xffff: tag.setVR(IMPLICIT_VR); return 8; default: tag.setVR(IMPLICIT_VR); int len = DataTools.bytesToInt(b, stream.isLittleEndian()); if (len + stream.getFilePointer() > stream.length() || len < 0) { len = DataTools.bytesToInt(b, 2, 2, stream.isLittleEndian()); len &= 0xffff; } return len; } } } public static class DICOMTag { private int elementLength = 0; private int tagValue; private int vr = 0; private boolean inSequence = false; private int location = 0; private boolean littleEndian; public int getLocation() { return location; } public void setLocation(final int location) { this.location = location; } public void addLocation(final int offset) { location += offset; } public int getVR() { return vr; } public void setVR(final int vr) { this.vr = vr; } public int getElementLength() { return elementLength; } public void setElementLength(final int elementLength) { this.elementLength = elementLength; } public void incrementElementLength() { elementLength++; } public int get() { return tagValue; } public void setTagValue(final int tagValue) { 
this.tagValue = tagValue; } public boolean isInSequence() { return inSequence; } public void setInSequence(final boolean inSequence) { this.inSequence = inSequence; } public boolean isLittleEndian() { return littleEndian; } public void setLittleEndian(final boolean littleEndian) { this.littleEndian = littleEndian; } } }
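For orientation, a minimal usage sketch of the reader implemented above, assuming the SCIFIO gateway is available on the classpath; the file path and class name below are hypothetical, and error handling is reduced to a thrown exception:

import io.scif.Plane;
import io.scif.Reader;
import io.scif.SCIFIO;

public class DicomReadSketch {
	public static void main(final String[] args) throws Exception {
		final SCIFIO scifio = new SCIFIO();
		// initializeReader selects the matching format (here DICOMFormat) via its Checker.
		final Reader reader = scifio.initializer().initializeReader("/path/to/sample.dcm");
		// Open the first plane of the first image; the Reader above handles RLE/JPEG/JP2K planes.
		final Plane plane = reader.openPlane(0, 0);
		System.out.println("plane bytes: " + plane.getBytes().length);
		reader.close();
		scifio.getContext().dispose();
	}
}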
DICOMFormat: reorder switch statement

To be alphabetical
src/main/java/io/scif/formats/DICOMFormat.java
DICOMFormat: reorder switch statement
<ide><path>rc/main/java/io/scif/formats/DICOMFormat.java <ide> switch (vr) { <ide> case DICOMUtils.AE: <ide> case DICOMUtils.AS: <del> case DICOMUtils.CS: <del> case DICOMUtils.DA: <del> case DICOMUtils.DS: <del> case DICOMUtils.DT: <del> case DICOMUtils.IS: <del> case DICOMUtils.LO: <del> case DICOMUtils.LT: <del> case DICOMUtils.PN: <del> case DICOMUtils.SH: <del> case DICOMUtils.ST: <del> case DICOMUtils.TM: <del> case DICOMUtils.UI: <del> value = getSource().readString(tag.getElementLength()); <del> break; <ide> case DICOMUtils.AT: <ide> // Cannot fix element length to 4, because AT value representation is <ide> // always <ide> } <ide> // Convert the bytes to a string <ide> value = DataTools.bytesToHex(bytes); <add> break; <add> case DICOMUtils.CS: <add> case DICOMUtils.DA: <add> case DICOMUtils.DS: <add> case DICOMUtils.DT: <add> case DICOMUtils.IS: <add> case DICOMUtils.LO: <add> case DICOMUtils.LT: <add> case DICOMUtils.PN: <add> case DICOMUtils.SH: <add> case DICOMUtils.ST: <add> case DICOMUtils.TM: <add> case DICOMUtils.UI: <add> value = getSource().readString(tag.getElementLength()); <ide> break; <ide> case DICOMUtils.US: <ide> if (tag.getElementLength() == 2) value =
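As context for the switch reordered in the diff above: each DICOMUtils VR constant packs the two ASCII characters of a DICOM value representation into one int (for example CS = 0x4353 is 'C' followed by 'S'), which is also how getHeaderInfo rebuilds an implicit VR from the dictionary entry. A small illustrative sketch, not part of the original source:

public class VrNameSketch {
	// Decodes a packed VR constant such as 0x4353 back into its two-character name.
	static String vrToString(final int vr) {
		return "" + (char) ((vr >> 8) & 0xff) + (char) (vr & 0xff);
	}

	public static void main(final String[] args) {
		System.out.println(vrToString(0x4145)); // AE
		System.out.println(vrToString(0x4353)); // CS
		System.out.println(vrToString(0x5351)); // SQ
	}
}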
Java
apache-2.0
5d82ac220c7e8c2c19e7ab1be3f3ed61a0aa22b0
0
ruilyzhu/android-utils,Heart2009/android-utils,Ryan800/android-utils,jingle1267/android-utils,VikingDen/android-utils,huhu2008/android-utils,SoftwareME/android-utils,chaoallsome/android-utils,tsdl2013/android-utils,mumer92/android-utils-1
/** * Copyright 2014 Zhenguo Jin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.worthed.utils; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.List; import android.graphics.Paint; import android.graphics.Paint.FontMetrics; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.AbsListView; import android.widget.GridView; import android.widget.LinearLayout; import android.widget.ListAdapter; import android.widget.ListView; import android.widget.RelativeLayout; import android.widget.RelativeLayout.LayoutParams; import android.widget.TextView; /** * ViewUtils * <ul> * <strong>get view height</strong> * <li>{@link ViewUtils#getListViewHeightBasedOnChildren(ListView)}</li> * <li>{@link ViewUtils#getAbsListViewHeightBasedOnChildren(AbsListView)}</li> * </ul> * <ul> * <strong>set view height</strong> * <li>{@link ViewUtils#setViewHeight(View, int)} set view height</li> * <li>{@link ViewUtils#setListViewHeightBasedOnChildren(ListView)}</li> * <li>{@link ViewUtils#setAbsListViewHeightBasedOnChildren(AbsListView)}</li> * </ul> * <ul> * <strong>get other info</strong> * <li>{@link ViewUtils#getGridViewVerticalSpacing(GridView)} get GridView * vertical spacing</li> * </ul> * <ul> * <strong>set other info</strong> * <li>{@link ViewUtils#setSearchViewOnClickListener(View, OnClickListener)}</li> * </ul> * * @author [email protected] */ public class ViewUtils { /** * get ListView height according to every children * * @param view * @return */ public static int getListViewHeightBasedOnChildren(ListView view) { int height = getAbsListViewHeightBasedOnChildren(view); ListAdapter adapter; int adapterCount; if (view != null && (adapter = view.getAdapter()) != null && (adapterCount = adapter.getCount()) > 0) { height += view.getDividerHeight() * (adapterCount - 1); } return height; } private static final String CLASS_NAME_GRID_VIEW = "android.widget.GridView"; private static final String FIELD_NAME_VERTICAL_SPACING = "mVerticalSpacing"; /** * get GridView vertical spacing * * @param view * @return */ public static int getGridViewVerticalSpacing(GridView view) { // get mVerticalSpacing by android.widget.GridView Class<?> demo = null; int verticalSpacing = 0; try { demo = Class.forName(CLASS_NAME_GRID_VIEW); Field field = demo.getDeclaredField(FIELD_NAME_VERTICAL_SPACING); field.setAccessible(true); verticalSpacing = (Integer) field.get(view); return verticalSpacing; } catch (Exception e) { /** * accept all exception, include ClassNotFoundException, * NoSuchFieldException, InstantiationException, * IllegalArgumentException, IllegalAccessException, * NullPointException */ e.printStackTrace(); } return verticalSpacing; } /** * get AbsListView height according to every children * * @param view * @return */ public static int getAbsListViewHeightBasedOnChildren(AbsListView view) { ListAdapter adapter; if (view == null || (adapter = view.getAdapter()) == null) { return 0; } int height = 0; for (int i = 0; i < 
adapter.getCount(); i++) { View item = adapter.getView(i, null, view); if (item instanceof ViewGroup) { item.setLayoutParams(new LayoutParams( LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT)); } item.measure(0, 0); height += item.getMeasuredHeight(); } height += view.getPaddingTop() + view.getPaddingBottom(); return height; } /** * get Font height * @param view * @return */ public static int getFontHeight(TextView view) { Paint paint = new Paint(); paint.setTextSize(view.getTextSize()); FontMetrics fm = paint.getFontMetrics(); return (int) (Math.ceil(fm.descent - fm.ascent)); } /** * set view height * * @param view * @param height */ public static void setViewHeight(View view, int height) { if (view == null) { return; } ViewGroup.LayoutParams params = view.getLayoutParams(); params.height = height; } // /** // * set GistView height which is calculated by {@link # // getGridViewHeightBasedOnChildren(GridView)} // * // * @param view // * @return // */ // public static void setGridViewHeightBasedOnChildren(GridView view) { // setViewHeight(view, getGridViewHeightBasedOnChildren(view)); // } /** * set ListView height which is calculated by * {@link # getListViewHeightBasedOnChildren(ListView)} * * @param view * @return */ public static void setListViewHeightBasedOnChildren(ListView view) { setViewHeight(view, getListViewHeightBasedOnChildren(view)); } /** * set AbsListView height which is calculated by * {@link # getAbsListViewHeightBasedOnChildren(AbsListView)} * * @param view * @return */ public static void setAbsListViewHeightBasedOnChildren(AbsListView view) { setViewHeight(view, getAbsListViewHeightBasedOnChildren(view)); } /** * set SearchView OnClickListener * * @param v * @param listener */ public static void setSearchViewOnClickListener(View v, OnClickListener listener) { if (v instanceof ViewGroup) { ViewGroup group = (ViewGroup) v; int count = group.getChildCount(); for (int i = 0; i < count; i++) { View child = group.getChildAt(i); if (child instanceof LinearLayout || child instanceof RelativeLayout) { setSearchViewOnClickListener(child, listener); } if (child instanceof TextView) { TextView text = (TextView) child; text.setFocusable(false); } child.setOnClickListener(listener); } } } /** * get descended views from parent. * * @param parent * @param filter * Type of views which will be returned. * @param includeSubClass * Whether returned list will include views which are subclass of * filter or not. * @return */ public static <T extends View> List<T> getDescendants(ViewGroup parent, Class<T> filter, boolean includeSubClass) { List<T> descendedViewList = new ArrayList<T>(); int childCount = parent.getChildCount(); for (int i = 0; i < childCount; i++) { View child = parent.getChildAt(i); Class<? extends View> childsClass = child.getClass(); if ((includeSubClass && filter.isAssignableFrom(childsClass)) || (!includeSubClass && childsClass == filter)) { descendedViewList.add(filter.cast(child)); } if (child instanceof ViewGroup) { descendedViewList.addAll(getDescendants((ViewGroup) child, filter, includeSubClass)); } } return descendedViewList; } }
src/com/worthed/utils/ViewUtils.java
/** * Copyright 2014 Zhenguo Jin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.worthed.utils; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.List; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.AbsListView; import android.widget.GridView; import android.widget.LinearLayout; import android.widget.ListAdapter; import android.widget.ListView; import android.widget.RelativeLayout; import android.widget.RelativeLayout.LayoutParams; import android.widget.TextView; /** * ViewUtils * <ul> * <strong>get view height</strong> * <li>{@link ViewUtils#getListViewHeightBasedOnChildren(ListView)}</li> * <li>{@link ViewUtils#getAbsListViewHeightBasedOnChildren(AbsListView)}</li> * </ul> * <ul> * <strong>set view height</strong> * <li>{@link ViewUtils#setViewHeight(View, int)} set view height</li> * <li>{@link ViewUtils#setListViewHeightBasedOnChildren(ListView)}</li> * <li>{@link ViewUtils#setAbsListViewHeightBasedOnChildren(AbsListView)}</li> * </ul> * <ul> * <strong>get other info</strong> * <li>{@link ViewUtils#getGridViewVerticalSpacing(GridView)} get GridView * vertical spacing</li> * </ul> * <ul> * <strong>set other info</strong> * <li>{@link ViewUtils#setSearchViewOnClickListener(View, OnClickListener)}</li> * </ul> * * @author [email protected] */ public class ViewUtils { /** * get ListView height according to every children * * @param view * @return */ public static int getListViewHeightBasedOnChildren(ListView view) { int height = getAbsListViewHeightBasedOnChildren(view); ListAdapter adapter; int adapterCount; if (view != null && (adapter = view.getAdapter()) != null && (adapterCount = adapter.getCount()) > 0) { height += view.getDividerHeight() * (adapterCount - 1); } return height; } private static final String CLASS_NAME_GRID_VIEW = "android.widget.GridView"; private static final String FIELD_NAME_VERTICAL_SPACING = "mVerticalSpacing"; /** * get GridView vertical spacing * * @param view * @return */ public static int getGridViewVerticalSpacing(GridView view) { // get mVerticalSpacing by android.widget.GridView Class<?> demo = null; int verticalSpacing = 0; try { demo = Class.forName(CLASS_NAME_GRID_VIEW); Field field = demo.getDeclaredField(FIELD_NAME_VERTICAL_SPACING); field.setAccessible(true); verticalSpacing = (Integer) field.get(view); return verticalSpacing; } catch (Exception e) { /** * accept all exception, include ClassNotFoundException, * NoSuchFieldException, InstantiationException, * IllegalArgumentException, IllegalAccessException, * NullPointException */ e.printStackTrace(); } return verticalSpacing; } /** * get AbsListView height according to every children * * @param view * @return */ public static int getAbsListViewHeightBasedOnChildren(AbsListView view) { ListAdapter adapter; if (view == null || (adapter = view.getAdapter()) == null) { return 0; } int height = 0; for (int i = 0; i < adapter.getCount(); i++) { View item = adapter.getView(i, null, view); if (item instanceof 
ViewGroup) { item.setLayoutParams(new LayoutParams( LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT)); } item.measure(0, 0); height += item.getMeasuredHeight(); } height += view.getPaddingTop() + view.getPaddingBottom(); return height; } /** * set view height * * @param view * @param height */ public static void setViewHeight(View view, int height) { if (view == null) { return; } ViewGroup.LayoutParams params = view.getLayoutParams(); params.height = height; } // /** // * set GistView height which is calculated by {@link # // getGridViewHeightBasedOnChildren(GridView)} // * // * @param view // * @return // */ // public static void setGridViewHeightBasedOnChildren(GridView view) { // setViewHeight(view, getGridViewHeightBasedOnChildren(view)); // } /** * set ListView height which is calculated by * {@link # getListViewHeightBasedOnChildren(ListView)} * * @param view * @return */ public static void setListViewHeightBasedOnChildren(ListView view) { setViewHeight(view, getListViewHeightBasedOnChildren(view)); } /** * set AbsListView height which is calculated by * {@link # getAbsListViewHeightBasedOnChildren(AbsListView)} * * @param view * @return */ public static void setAbsListViewHeightBasedOnChildren(AbsListView view) { setViewHeight(view, getAbsListViewHeightBasedOnChildren(view)); } /** * set SearchView OnClickListener * * @param v * @param listener */ public static void setSearchViewOnClickListener(View v, OnClickListener listener) { if (v instanceof ViewGroup) { ViewGroup group = (ViewGroup) v; int count = group.getChildCount(); for (int i = 0; i < count; i++) { View child = group.getChildAt(i); if (child instanceof LinearLayout || child instanceof RelativeLayout) { setSearchViewOnClickListener(child, listener); } if (child instanceof TextView) { TextView text = (TextView) child; text.setFocusable(false); } child.setOnClickListener(listener); } } } /** * get descended views from parent. * * @param parent * @param filter * Type of views which will be returned. * @param includeSubClass * Whether returned list will include views which are subclass of * filter or not. * @return */ public static <T extends View> List<T> getDescendants(ViewGroup parent, Class<T> filter, boolean includeSubClass) { List<T> descendedViewList = new ArrayList<T>(); int childCount = parent.getChildCount(); for (int i = 0; i < childCount; i++) { View child = parent.getChildAt(i); Class<? extends View> childsClass = child.getClass(); if ((includeSubClass && filter.isAssignableFrom(childsClass)) || (!includeSubClass && childsClass == filter)) { descendedViewList.add(filter.cast(child)); } if (child instanceof ViewGroup) { descendedViewList.addAll(getDescendants((ViewGroup) child, filter, includeSubClass)); } } return descendedViewList; } }
[NewFeature] Add get text height method.
src/com/worthed/utils/ViewUtils.java
[NewFeature] Add get text height method.
<ide><path>rc/com/worthed/utils/ViewUtils.java <ide> import java.util.ArrayList; <ide> import java.util.List; <ide> <add>import android.graphics.Paint; <add>import android.graphics.Paint.FontMetrics; <ide> import android.view.View; <ide> import android.view.View.OnClickListener; <ide> import android.view.ViewGroup; <ide> } <ide> <ide> /** <add> * get Font height <add> * @param view <add> * @return <add> */ <add> public static int getFontHeight(TextView view) { <add> Paint paint = new Paint(); <add> paint.setTextSize(view.getTextSize()); <add> FontMetrics fm = paint.getFontMetrics(); <add> return (int) (Math.ceil(fm.descent - fm.ascent)); <add> } <add> <add> /** <ide> * set view height <ide> * <ide> * @param view
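A short usage sketch for the getFontHeight helper added in this commit; the Activity and values below are hypothetical and only illustrate the call:

import android.app.Activity;
import android.os.Bundle;
import android.widget.TextView;

import com.worthed.utils.ViewUtils;

public class FontHeightDemoActivity extends Activity {
	@Override
	protected void onCreate(final Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		final TextView label = new TextView(this);
		label.setTextSize(16); // sp
		// getFontHeight builds a Paint with the view's text size and returns
		// ceil(descent - ascent), i.e. the pixel height of one line of text.
		final int fontHeight = ViewUtils.getFontHeight(label);
		label.setText("Line height: " + fontHeight + "px");
		setContentView(label);
	}
}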
JavaScript
mit
9ca23a1f7833d1e4c58bd1e59edf08f6b6979408
0
mangesh/f-album,mangesh/f-album,mangesh/f-album
$(function () { $('#download-mode-group').slideUp(); $('.upload-alert').hide(); if($('#mode-group input[name="mode"]').length !== 0){ $('#mode-group input[name="mode"]').on('change', function (e) { var _this = $(this); if (_this.val() == 'download') { $('.load-album button,.upload-album button').attr('disabled',false).removeClass('upload').addClass('download').html('Download'); $('.upload-album').removeClass('active selected'); $('.album').removeClass('load-album upload-album').addClass('download-album'); $('.selected').removeClass('selected'); $('#download-mode-group').removeClass('hide'); $('.selected-picasa-albums').addClass('selected-albums').attr('disabled',true).removeClass('selected-picasa-albums'); $('p.name').addClass('hide'); $('p.button').removeClass('hide'); $('.group-label').html('Download'); } else if (_this.val() == 'upload'){ $('.load-album button,.download-album button').attr('disabled',false).addClass('upload').removeClass('download').html('Upload'); $('.download-album').removeClass('active selected'); $('.album').removeClass('load-album download-album').addClass('upload-album'); $('.selected').removeClass('selected'); $('#download-mode-group').removeClass('hide'); $('.selected-albums').addClass('selected-picasa-albums').attr('disabled',true).removeClass('selected-albums'); $('p.name').addClass('hide'); $('p.button').removeClass('hide'); $('.group-label').html('Upload'); } else { $('.album').removeClass('download-album upload-album').addClass('load-album'); $('#download-mode-group').button('reset'); $('#download-mode-group').addClass('hide'); $('.download-album button,.upload-album button').attr('disabled',false).removeClass('download upload'); $('.selected-picasa-albums,.selected-albums').addClass('selected-picasa-albums selected-albums'); $('.download-album, .upload-album').removeClass('active selected'); $('p.button').addClass('hide'); $('p.name').removeClass('hide') } $('#albums').masonry(); }) } $(document).on('click', '.download-album img', function (e) { e.preventDefault(); var _this = $(this).parents('li'); if( e.target == this ){ console.log(_this); _this.toggleClass("active").toggleClass("selected"); var selection = new Array(); $(".download-album.selected").each(function (ix, el) { selection.push($(el)[0]); }); if ( selection.length > 0 ) { $('.download').prop('disabled', true); $('.selected-albums').prop('disabled', false); } else { $('.download').prop('disabled', false); $('.selected-albums').prop('disabled', true); } } }) $(document).on('click', '.upload-album img', function (e) { e.preventDefault(); var _this = $(this).parents('li'); if( e.target == this ){ _this.toggleClass("active").toggleClass("selected"); var selection = new Array(); $(".upload-album.selected").each(function (ix, el) { selection.push($(el)[0]); }); if ( selection.length > 0 ) { $('.upload').prop('disabled', true); $('.selected-picasa-albums').prop('disabled', false); } else { $('.upload').prop('disabled', false); $('.selected-picasa-albums').prop('disabled', true); } } }) if ($('.load-album').length !== 0) { $(document).on('click', '.load-album img', function (e) { e.preventDefault(); $data = { size: 32, bgColor: "#fff", bgOpacity: 0.6, fontColor: "#000", title: '', }; $.loader.open($data); $.ajax({ method: "GET", url: "/album", data: { id: $(this).parents('li').attr('id') }, dataType: "json" }).done(function (result) { var div; var img; var li; $('.carousel-inner').html(''); $.each(result, function(p,i){ div = $('<div>'); div.addClass('item'); img = $('<img>').appendTo(div); 
img.attr('src',i.picture).attr('alt','image'+(p+1)).addClass('img-responsive'); img.appendTo(div); div.appendTo('.carousel-inner'); }) $('.carousel-inner div').eq(0).addClass('active'); $('.carousel-inner .item img').css('max-height',(($( window ).height()*0.8)-60)); var $carousel = $('.carousel').carousel({ interval: 3000 }).hide(); imagesLoaded( $('.carousel'), function() { $.loader.close(true); $('.carousel').show().carousel('cycle'); $('#openModal').modal({show:true}); }); }).fail(function () { }).always(function () { }) }) $(document).on('click', '.all-albums', function (e) { BootstrapDialog.confirm({ title: 'Are you sure?', message: 'This may take some time!', type: BootstrapDialog.TYPE_WARNING, closable: true, draggable: true, btnCancelLabel: 'Cancel!', btnOKLabel: 'D0 it!', btnOKClass: 'btn-warning', callback: function(result) { if(result) { if($('.download-album').length > 0){ $('.download-album').removeClass("active").removeClass("selected"); $('.download-album').addClass("active").addClass("selected"); $('.selected-albums').prop('disabled', false); $('.selected-albums').trigger('click'); } else { $('.upload-album').removeClass("active").removeClass("selected"); $('.upload-album').addClass("active").addClass("selected"); $('.selected-picasa-albums').prop('disabled', false); $('.selected-picasa-albums').trigger('click'); } }else { if($('.download-album').length > 0){ $('.selected-albums').prop('disabled', false); $('.download-album').removeClass("active").removeClass("selected"); } else { $('.selected-picasa-albums').prop('disabled', false); $('.upload-album').removeClass("active").removeClass("selected"); } return false; } } }) }) $(document).on('click', 'button.download, .selected-albums', function (e) { e.preventDefault(); $data = { size: 32, bgColor: "#fff", bgOpacity: 0.6, fontColor: "#000", title: '', }; $.loader.open($data); if($(this).hasClass('download')){ $('.selected').removeClass('selected'); $(this).parents('li').addClass('selected'); } var array = jQuery('#albums li.selected').map(function(){ return 'id[]=' + this.id }).get(); $.ajax({ method: "GET", url: "/album/download", data: array.join('&'), dataType: "json" }).done(function (result) { if (result.status == 'error'){ BootstrapDialog.alert({ title: 'Error!', message: result.msg, type: BootstrapDialog.TYPE_WARNING, closable: true, draggable: true, callback: function(result) { $.loader.close(true); } }) } else { $('li.album').removeClass('active selected'); $('.selected-albums').attr('disabled',true); $('.link-alert .link').attr('href',result.download_link); $('.link-alert').show(); $('button.download').attr('disabled', false); $.loader.close(true); $( "html, body" ).animate({ scrollTop: 0 }, 800); } }).fail(function () { BootstrapDialog.alert({ title: 'Error!', message: 'Technical error occured. 
PleaseTry again after some time.', type: BootstrapDialog.TYPE_WARNING, closable: true, draggable: true, callback: function(result) { $.loader.close(true); } }) }).always(function () { }) }) $(document).on('click', 'p .close', function (e) { e.preventDefault(); $(this).parents('p').hide(); }) $(document).on('click', 'button.upload, .selected-picasa-albums', function (e) { e.preventDefault(); $data = { size: 32, bgColor: "#fff", bgOpacity: 0.6, fontColor: "#000", title: '', }; $.loader.open($data); if($(this).hasClass('upload')){ $('.selected').removeClass('selected'); $(this).parents('li').addClass('selected'); } var array = jQuery('#albums li.selected').map(function(){ return 'id[]=' + this.id }).get(); $.ajax({ method: "GET", url: "/album/upload", data: array.join('&'), dataType: "json" }).done(function (result) { if (result.status == 'need_google_login') { $.loader.close(true); authorize(); } else if (result.status == 'error'){ BootstrapDialog.alert({ title: 'Error!', message: result.msg, type: BootstrapDialog.TYPE_WARNING, closable: true, draggable: true, callback: function(result) { $.loader.close(true); } }) } else { $('li.album').removeClass('active selected'); $('.selected-picasa-albums').attr('disabled',true); $('.upload-alert').show(); $( "html, body" ).animate({ scrollTop: 0 }, 800) } $.loader.close(true); }).fail(function () { BootstrapDialog.alert({ title: 'Error!', message: 'Technical error occured. PleaseTry again after some time.', type: BootstrapDialog.TYPE_WARNING, closable: true, draggable: true, callback: function(result) { $.loader.close(true); } }) }).always(function () { }) }) } var gutter = parseInt(jQuery('.album').css('marginBottom')); if($('#albums').length > 0){ var $grid = $('#albums').masonry({ percentPosition: true, gutter: gutter, itemSelector: '.album', columnWidth: '.album', isFitWidth: true }); $grid.imagesLoaded().progress( function() { $grid.masonry(); }); } $(document).on("shown.bs.modal", ".modal", function(event) { if ($(".carousel").length) { $(".carousel").data("bs.carousel").fit(); } }) }) function authorize() { var oauthWindow = window.open("https://accounts.google.com/o/oauth2/auth?scope=https://picasaweb.google.com/data/&response_type=code&access_type=offline&redirect_uri=http://"+window.location.hostname+"/google_callback&approval_prompt=force&client_id=548862589391-v5so882uie6k657ehpptta1p665uvscu.apps.googleusercontent.com","_blank","width=700,height=400"); if(!oauthWindow || oauthWindow.closed || typeof oauthWindow.closed=='undefined') { BootstrapDialog.alert({ title: 'Alert!', message: 'Please unblock popup window to login with google account', type: BootstrapDialog.TYPE_WARNING, closable: true, draggable: true, callback: function(result) { } }) } } function oauth_complete() { $('.selected-picasa-albums').attr('disabled',false).trigger('click'); }
public/js/application.js
$(function () { $('#download-mode-group').slideUp(); $('.upload-alert').hide(); if($('#mode-group input[name="mode"]').length !== 0){ $('#mode-group input[name="mode"]').on('change', function (e) { var _this = $(this); if (_this.val() == 'download') { $('.load-album button,.upload-album button').attr('disabled',false).removeClass('upload').addClass('download').html('Download'); $('.upload-album').removeClass('active selected'); $('.album').removeClass('load-album upload-album').addClass('download-album'); $('.selected').removeClass('selected'); $('#download-mode-group').removeClass('hide'); $('.selected-picasa-albums').addClass('selected-albums').attr('disabled',true).removeClass('selected-picasa-albums'); $('p.name').addClass('hide'); $('p.button').removeClass('hide'); $('.group-label').html('Download'); } else if (_this.val() == 'upload'){ $('.load-album button,.download-album button').attr('disabled',false).addClass('upload').removeClass('download').html('Upload'); $('.download-album').removeClass('active selected'); $('.album').removeClass('load-album download-album').addClass('upload-album'); $('.selected').removeClass('selected'); $('#download-mode-group').removeClass('hide'); $('.selected-albums').addClass('selected-picasa-albums').attr('disabled',true).removeClass('selected-albums'); $('p.name').addClass('hide'); $('p.button').removeClass('hide'); $('.group-label').html('Upload'); } else { $('.album').removeClass('download-album upload-album').addClass('load-album'); $('#download-mode-group').button('reset'); $('#download-mode-group').addClass('hide'); $('.download-album button,.upload-album button').attr('disabled',false).removeClass('download upload'); $('.selected-picasa-albums,.selected-albums').addClass('selected-picasa-albums selected-albums'); $('.download-album, .upload-album').removeClass('active selected'); $('p.button').addClass('hide'); $('p.name').removeClass('hide') } $('#albums').masonry(); }) } $(document).on('click', '.download-album img', function (e) { e.preventDefault(); var _this = $(this).parents('li'); if( e.target == this ){ console.log(_this); _this.toggleClass("active").toggleClass("selected"); var selection = new Array(); $(".download-album.selected").each(function (ix, el) { selection.push($(el)[0]); }); if ( selection.length > 0 ) { $('.download').prop('disabled', true); $('.selected-albums').prop('disabled', false); } else { $('.download').prop('disabled', false); $('.selected-albums').prop('disabled', true); } } }) $(document).on('click', '.upload-album img', function (e) { e.preventDefault(); var _this = $(this).parents('li'); if( e.target == this ){ _this.toggleClass("active").toggleClass("selected"); var selection = new Array(); $(".upload-album.selected").each(function (ix, el) { selection.push($(el)[0]); }); if ( selection.length > 0 ) { $('.upload').prop('disabled', true); $('.selected-picasa-albums').prop('disabled', false); } else { $('.upload').prop('disabled', false); $('.selected-picasa-albums').prop('disabled', true); } } }) if ($('.load-album').length !== 0) { $(document).on('click', '.load-album img', function (e) { e.preventDefault(); $data = { size: 32, bgColor: "#fff", bgOpacity: 0.6, fontColor: "#000", title: '', }; $.loader.open($data); $.ajax({ method: "GET", url: "/album", data: { id: $(this).parents('li').attr('id') }, dataType: "json" }).done(function (result) { var div; var img; var li; $('.carousel-inner').html(''); $.each(result, function(p,i){ div = $('<div>'); div.addClass('item'); img = $('<img>').appendTo(div); 
img.attr('src',i.picture).attr('alt','image'+(p+1)).addClass('img-responsive'); img.appendTo(div); div.appendTo('.carousel-inner'); }) $('.carousel-inner div').eq(0).addClass('active'); $('.carousel-inner .item img').css('max-height',(($( window ).height()*0.8)-60)); var $carousel = $('.carousel').carousel({ interval: 3000 }).hide(); imagesLoaded( $('.carousel'), function() { $.loader.close(true); $('.carousel').show().carousel('cycle'); $('#openModal').modal({show:true}); }); }).fail(function () { }).always(function () { }) }) $(document).on('click', '.all-albums', function (e) { BootstrapDialog.confirm({ title: 'Are you sure?', message: 'This may take some time!', type: BootstrapDialog.TYPE_WARNING, closable: true, draggable: true, btnCancelLabel: 'Cancel!', btnOKLabel: 'D0 it!', btnOKClass: 'btn-warning', callback: function(result) { if(result) { if($('.download-album').length > 0){ $('.download-album').removeClass("active").removeClass("selected"); $('.download-album').addClass("active").addClass("selected"); $('.selected-albums').prop('disabled', false); $('.selected-albums').trigger('click'); } else { $('.upload-album').removeClass("active").removeClass("selected"); $('.upload-album').addClass("active").addClass("selected"); $('.selected-picasa-albums').prop('disabled', false); $('.selected-picasa-albums').trigger('click'); } }else { if($('.download-album').length > 0){ $('.selected-albums').prop('disabled', false); $('.download-album').removeClass("active").removeClass("selected"); } else { $('.selected-picasa-albums').prop('disabled', false); $('.upload-album').removeClass("active").removeClass("selected"); } return false; } } }) }) $(document).on('click', 'button.download, .selected-albums', function (e) { e.preventDefault(); $data = { size: 32, bgColor: "#fff", bgOpacity: 0.6, fontColor: "#000", title: '', }; $.loader.open($data); if($(this).hasClass('download')){ $('.selected').removeClass('selected'); $(this).parents('li').addClass('selected'); } var array = jQuery('#albums li.selected').map(function(){ return 'id[]=' + this.id }).get(); $.ajax({ method: "GET", url: "/album/download", data: array.join('&'), dataType: "json" }).done(function (result) { if (result.status == 'error'){ BootstrapDialog.alert({ title: 'Error!', message: result.msg, type: BootstrapDialog.TYPE_WARNING, closable: true, draggable: true, callback: function(result) { $.loader.close(true); } }) } else { $('li.album').removeClass('active selected'); $('.selected-albums').attr('disabled',true); $('.link-alert .link').attr('href',result.download_link); $('.link-alert').show(); $('button.download').attr('disabled', false); $.loader.close(true); $( "html, body" ).animate({ scrollTop: 0 }, 800); } }).fail(function () { BootstrapDialog.alert({ title: 'Error!', message: 'Technical error occured. 
PleaseTry again after some time.', type: BootstrapDialog.TYPE_WARNING, closable: true, draggable: true, callback: function(result) { $.loader.close(true); } }) }).always(function () { }) }) $(document).on('click', 'p .close', function (e) { e.preventDefault(); $(this).parents('p').hide(); }) $(document).on('click', 'button.upload, .selected-picasa-albums', function (e) { e.preventDefault(); $data = { size: 32, bgColor: "#fff", bgOpacity: 0.6, fontColor: "#000", title: '', }; $.loader.open($data); if($(this).hasClass('upload')){ $('.selected').removeClass('selected'); $(this).parents('li').addClass('selected'); } var array = jQuery('#albums li.selected').map(function(){ return 'id[]=' + this.id }).get(); $.ajax({ method: "GET", url: "/album/upload", data: array.join('&'), dataType: "json" }).done(function (result) { if (result.status == 'need_google_login') { $.loader.close(true); authorize(); } else if (result.status == 'error'){ BootstrapDialog.alert({ title: 'Error!', message: result.msg, type: BootstrapDialog.TYPE_WARNING, closable: true, draggable: true, callback: function(result) { $.loader.close(true); } }) } else { $('li.album').removeClass('active selected'); $('.selected-picasa-albums').attr('disabled',true); $('.upload-alert').show(); $( "html, body" ).animate({ scrollTop: 0 }, 800) } $.loader.close(true); }).fail(function () { BootstrapDialog.alert({ title: 'Error!', message: 'Technical error occured. PleaseTry again after some time.', type: BootstrapDialog.TYPE_WARNING, closable: true, draggable: true, callback: function(result) { $.loader.close(true); } }) }).always(function () { }) }) } var gutter = parseInt(jQuery('.album').css('marginBottom')); if($('#albums').length > 0){ var $grid = $('#albums').masonry({ percentPosition: true, gutter: gutter, itemSelector: '.album', columnWidth: '.album', isFitWidth: true }); $grid.imagesLoaded().progress( function() { $grid.masonry(); }); } $(document).on("shown.bs.modal", ".modal", function(event) { if ($(".carousel").length) { console.log('hi'); $(".carousel").data("bs.carousel").fit(); } }) }) function authorize() { var oauthWindow = window.open("https://accounts.google.com/o/oauth2/auth?scope=https://picasaweb.google.com/data/&response_type=code&access_type=offline&redirect_uri=http://"+window.location.hostname+"/google_callback&approval_prompt=force&client_id=548862589391-v5so882uie6k657ehpptta1p665uvscu.apps.googleusercontent.com","_blank","width=700,height=400"); if(!oauthWindow || oauthWindow.closed || typeof oauthWindow.closed=='undefined') { BootstrapDialog.alert({ title: 'Alert!', message: 'Please unblock popup window to login with google account', type: BootstrapDialog.TYPE_WARNING, closable: true, draggable: true, callback: function(result) { } }) } } function oauth_complete() { $('.selected-picasa-albums').attr('disabled',false).trigger('click'); }
Removed the debugging lines
public/js/application.js
Removed the debugging lines
<ide><path>public/js/application.js <ide> <ide> $(document).on("shown.bs.modal", ".modal", function(event) { <ide> if ($(".carousel").length) { <del> console.log('hi'); <ide> $(".carousel").data("bs.carousel").fit(); <ide> } <ide> })
Java
apache-2.0
82bd0baab2dcc4659c68919117b24dd9a8e17ebf
0
FingolfinTEK/camel,pkletsko/camel,driseley/camel,oalles/camel,mcollovati/camel,gnodet/camel,jlpedrosa/camel,w4tson/camel,salikjan/camel,jmandawg/camel,dmvolod/camel,acartapanis/camel,gautric/camel,tadayosi/camel,pmoerenhout/camel,nikhilvibhav/camel,jmandawg/camel,scranton/camel,drsquidop/camel,YoshikiHigo/camel,pmoerenhout/camel,mcollovati/camel,cunningt/camel,jamesnetherton/camel,rmarting/camel,alvinkwekel/camel,sabre1041/camel,jkorab/camel,apache/camel,YoshikiHigo/camel,snurmine/camel,sverkera/camel,hqstevenson/camel,jmandawg/camel,jamesnetherton/camel,CodeSmell/camel,nikvaessen/camel,w4tson/camel,akhettar/camel,anton-k11/camel,CodeSmell/camel,allancth/camel,allancth/camel,davidkarlsen/camel,adessaigne/camel,jlpedrosa/camel,tkopczynski/camel,dmvolod/camel,scranton/camel,rmarting/camel,jonmcewen/camel,FingolfinTEK/camel,yuruki/camel,alvinkwekel/camel,prashant2402/camel,veithen/camel,acartapanis/camel,yuruki/camel,pax95/camel,tlehoux/camel,tdiesler/camel,pmoerenhout/camel,sabre1041/camel,curso007/camel,oalles/camel,nikhilvibhav/camel,tadayosi/camel,jonmcewen/camel,anton-k11/camel,jamesnetherton/camel,tadayosi/camel,tlehoux/camel,jkorab/camel,sabre1041/camel,RohanHart/camel,Thopap/camel,sirlatrom/camel,isavin/camel,isavin/camel,lburgazzoli/apache-camel,nikvaessen/camel,chirino/camel,prashant2402/camel,lburgazzoli/apache-camel,curso007/camel,neoramon/camel,tdiesler/camel,dmvolod/camel,tdiesler/camel,akhettar/camel,JYBESSON/camel,gnodet/camel,apache/camel,jonmcewen/camel,w4tson/camel,kevinearls/camel,bhaveshdt/camel,bgaudaen/camel,driseley/camel,sirlatrom/camel,borcsokj/camel,JYBESSON/camel,sverkera/camel,borcsokj/camel,onders86/camel,cunningt/camel,apache/camel,hqstevenson/camel,nikvaessen/camel,kevinearls/camel,JYBESSON/camel,pmoerenhout/camel,apache/camel,JYBESSON/camel,NickCis/camel,christophd/camel,DariusX/camel,bgaudaen/camel,snurmine/camel,driseley/camel,salikjan/camel,ullgren/camel,drsquidop/camel,NickCis/camel,onders86/camel,yuruki/camel,pkletsko/camel,cunningt/camel,chirino/camel,hqstevenson/camel,hqstevenson/camel,DariusX/camel,adessaigne/camel,Thopap/camel,NickCis/camel,neoramon/camel,anton-k11/camel,acartapanis/camel,tdiesler/camel,scranton/camel,mgyongyosi/camel,driseley/camel,RohanHart/camel,acartapanis/camel,RohanHart/camel,jarst/camel,bgaudaen/camel,oalles/camel,oalles/camel,lburgazzoli/apache-camel,YoshikiHigo/camel,NickCis/camel,neoramon/camel,nicolaferraro/camel,tkopczynski/camel,mgyongyosi/camel,acartapanis/camel,punkhorn/camel-upstream,sverkera/camel,rmarting/camel,jamesnetherton/camel,nboukhed/camel,dmvolod/camel,Fabryprog/camel,rmarting/camel,Thopap/camel,isavin/camel,borcsokj/camel,gilfernandes/camel,punkhorn/camel-upstream,onders86/camel,gilfernandes/camel,sverkera/camel,objectiser/camel,RohanHart/camel,pax95/camel,snurmine/camel,davidkarlsen/camel,allancth/camel,NickCis/camel,rmarting/camel,jamesnetherton/camel,ssharma/camel,pmoerenhout/camel,prashant2402/camel,adessaigne/camel,anton-k11/camel,cunningt/camel,mgyongyosi/camel,lburgazzoli/camel,nikvaessen/camel,tkopczynski/camel,curso007/camel,Thopap/camel,tlehoux/camel,hqstevenson/camel,driseley/camel,anoordover/camel,christophd/camel,tlehoux/camel,sabre1041/camel,sverkera/camel,gnodet/camel,ssharma/camel,drsquidop/camel,lburgazzoli/camel,mgyongyosi/camel,neoramon/camel,zregvart/camel,jamesnetherton/camel,bhaveshdt/camel,anoordover/camel,ssharma/camel,objectiser/camel,pax95/camel,cunningt/camel,sabre1041/camel,nboukhed/camel,bhaveshdt/camel,FingolfinTEK/camel,CodeSmell/camel,jmandawg/camel,pax95/camel,chirino/camel,ni
kvaessen/camel,christophd/camel,pax95/camel,acartapanis/camel,sirlatrom/camel,objectiser/camel,neoramon/camel,scranton/camel,davidkarlsen/camel,gilfernandes/camel,alvinkwekel/camel,tlehoux/camel,sirlatrom/camel,isavin/camel,isavin/camel,JYBESSON/camel,davidkarlsen/camel,nicolaferraro/camel,anoordover/camel,borcsokj/camel,anoordover/camel,veithen/camel,jarst/camel,sverkera/camel,sirlatrom/camel,DariusX/camel,tdiesler/camel,YoshikiHigo/camel,neoramon/camel,tkopczynski/camel,borcsokj/camel,tdiesler/camel,tadayosi/camel,RohanHart/camel,prashant2402/camel,cunningt/camel,kevinearls/camel,gautric/camel,prashant2402/camel,jarst/camel,ssharma/camel,lburgazzoli/camel,bgaudaen/camel,drsquidop/camel,oalles/camel,snurmine/camel,w4tson/camel,pkletsko/camel,anton-k11/camel,JYBESSON/camel,veithen/camel,nikvaessen/camel,nboukhed/camel,apache/camel,lburgazzoli/camel,mcollovati/camel,DariusX/camel,punkhorn/camel-upstream,tadayosi/camel,nikhilvibhav/camel,apache/camel,NickCis/camel,gnodet/camel,curso007/camel,christophd/camel,jarst/camel,w4tson/camel,tadayosi/camel,nicolaferraro/camel,jmandawg/camel,kevinearls/camel,mgyongyosi/camel,bhaveshdt/camel,zregvart/camel,akhettar/camel,pax95/camel,lburgazzoli/apache-camel,bhaveshdt/camel,bgaudaen/camel,FingolfinTEK/camel,ullgren/camel,onders86/camel,alvinkwekel/camel,nboukhed/camel,tkopczynski/camel,objectiser/camel,prashant2402/camel,rmarting/camel,lburgazzoli/apache-camel,yuruki/camel,akhettar/camel,anoordover/camel,adessaigne/camel,kevinearls/camel,nboukhed/camel,sirlatrom/camel,bgaudaen/camel,hqstevenson/camel,ssharma/camel,chirino/camel,jlpedrosa/camel,pmoerenhout/camel,adessaigne/camel,curso007/camel,nboukhed/camel,dmvolod/camel,pkletsko/camel,jonmcewen/camel,yuruki/camel,punkhorn/camel-upstream,zregvart/camel,chirino/camel,allancth/camel,jarst/camel,jarst/camel,bhaveshdt/camel,dmvolod/camel,gilfernandes/camel,CodeSmell/camel,ullgren/camel,gautric/camel,lburgazzoli/camel,sabre1041/camel,veithen/camel,ssharma/camel,veithen/camel,onders86/camel,jonmcewen/camel,onders86/camel,YoshikiHigo/camel,jkorab/camel,nicolaferraro/camel,chirino/camel,tkopczynski/camel,zregvart/camel,akhettar/camel,jkorab/camel,jmandawg/camel,jkorab/camel,allancth/camel,Thopap/camel,Fabryprog/camel,scranton/camel,isavin/camel,FingolfinTEK/camel,jlpedrosa/camel,oalles/camel,curso007/camel,drsquidop/camel,gautric/camel,kevinearls/camel,jlpedrosa/camel,drsquidop/camel,gautric/camel,borcsokj/camel,christophd/camel,gilfernandes/camel,mgyongyosi/camel,Fabryprog/camel,anton-k11/camel,YoshikiHigo/camel,FingolfinTEK/camel,allancth/camel,christophd/camel,tlehoux/camel,RohanHart/camel,w4tson/camel,anoordover/camel,akhettar/camel,ullgren/camel,lburgazzoli/camel,adessaigne/camel,gnodet/camel,nikhilvibhav/camel,scranton/camel,gautric/camel,pkletsko/camel,snurmine/camel,mcollovati/camel,jkorab/camel,jonmcewen/camel,lburgazzoli/apache-camel,Thopap/camel,veithen/camel,driseley/camel,Fabryprog/camel,pkletsko/camel,yuruki/camel,snurmine/camel,jlpedrosa/camel,gilfernandes/camel
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.kafka; import java.util.Properties; import java.util.concurrent.Future; import org.apache.camel.AsyncCallback; import org.apache.camel.CamelException; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.impl.DefaultMessage; import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.errors.ApiException; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Matchers; import org.mockito.Mockito; import static org.junit.Assert.assertEquals; public class KafkaProducerTest { private KafkaProducer producer; private KafkaEndpoint endpoint; private Exchange exchange = Mockito.mock(Exchange.class); private Message in = new DefaultMessage(); private AsyncCallback callback = Mockito.mock(AsyncCallback.class); @SuppressWarnings({"unchecked"}) public KafkaProducerTest() throws Exception { endpoint = new KafkaEndpoint( "kafka:broker1:1234,broker2:4567?topic=sometopic", null); endpoint.setBrokers("broker1:1234,broker2:4567"); producer = new KafkaProducer(endpoint); RecordMetadata rm = new RecordMetadata(null, 1, 1); Future future = Mockito.mock(Future.class); Mockito.when(future.get()).thenReturn(rm); org.apache.kafka.clients.producer.KafkaProducer kp = Mockito.mock(org.apache.kafka.clients.producer.KafkaProducer.class); Mockito.when(kp.send(Mockito.any(ProducerRecord.class))).thenReturn(future); producer.setKafkaProducer(kp); } @Test public void testPropertyBuilder() throws Exception { Properties props = producer.getProps(); assertEquals("broker1:1234,broker2:4567", props.getProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG)); } @Test @SuppressWarnings({"unchecked"}) public void processSendsMessage() throws Exception { endpoint.setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); producer.process(exchange); Mockito.verify(producer.getKafkaProducer()).send(Matchers.any(ProducerRecord.class)); } @Test(expected=CamelException.class) @SuppressWarnings({"unchecked"}) public void processSendsMessageWithException() throws Exception { endpoint.setTopic("sometopic"); // setup the exception here org.apache.kafka.clients.producer.KafkaProducer kp = producer.getKafkaProducer(); Mockito.when(kp.send(Mockito.any(ProducerRecord.class))).thenThrow(new ApiException()); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); producer.process(exchange); } @Test public void processAsyncSendsMessage() throws Exception { endpoint.setTopic("sometopic"); 
Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); producer.process(exchange, callback); Mockito.verify(producer.getKafkaProducer()).send(Matchers.any(ProducerRecord.class), Matchers.any(Callback.class)); } @Test public void processAsyncSendsMessageWithException() throws Exception { endpoint.setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); // setup the exception here org.apache.kafka.clients.producer.KafkaProducer kp = producer.getKafkaProducer(); Mockito.when(kp.send(Mockito.any(ProducerRecord.class), Mockito.any(Callback.class))).thenThrow(new ApiException()); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); producer.process(exchange, callback); Mockito.verify(producer.getKafkaProducer()).send(Matchers.any(ProducerRecord.class), Matchers.any(Callback.class)); Mockito.verify(exchange).setException(Matchers.isA(ApiException.class)); Mockito.verify(callback).done(Matchers.eq(true)); } @Test public void processSendsMessageWithTopicHeaderAndNoTopicInEndPoint() throws Exception { endpoint.setTopic(null); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.TOPIC, "anotherTopic"); producer.process(exchange); verifySendMessage("anotherTopic"); } @Test public void processSendsMessageWithTopicHeaderAndEndPoint() throws Exception { endpoint.setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); in.setHeader(KafkaConstants.TOPIC, "anotherTopic"); in.setHeader(KafkaConstants.KEY, "someKey"); producer.process(exchange); verifySendMessage("4", "anotherTopic", "someKey"); } @Test(expected = CamelException.class) public void processRequiresTopicInEndpointOrInHeader() throws Exception { endpoint.setTopic(null); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); producer.process(exchange); } @Test public void processDoesNotRequirePartitionHeader() throws Exception { endpoint.setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); producer.process(exchange); } @Test public void processSendsMesssageWithPartitionKeyHeader() throws Exception { endpoint.setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); in.setHeader(KafkaConstants.KEY, "someKey"); producer.process(exchange); verifySendMessage("4", "someTopic", "someKey"); } @Test public void processSendsMesssageWithMessageKeyHeader() throws Exception { endpoint.setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.KEY, "someKey"); producer.process(exchange); verifySendMessage("someTopic", "someKey"); } @Test public void processSendMessageWithBridgeEndpoint() throws Exception { endpoint.setTopic("someTopic"); endpoint.setBridgeEndpoint(true); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.TOPIC, "anotherTopic"); in.setHeader(KafkaConstants.KEY, "someKey"); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); producer.process(exchange); verifySendMessage("4", "someTopic", "someKey"); } @Test // Message and Topic Name alone public void processSendsMesssageWithMessageTopicName() throws Exception { endpoint.setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); producer.process(exchange); verifySendMessage("someTopic"); } @SuppressWarnings({"unchecked", "rawtypes"}) protected void verifySendMessage(String partitionKey, String topic, String messageKey) { ArgumentCaptor<ProducerRecord> captor = 
ArgumentCaptor.forClass(ProducerRecord.class); Mockito.verify(producer.getKafkaProducer()).send(captor.capture()); assertEquals(new Integer(partitionKey), captor.getValue().partition()); assertEquals(messageKey, captor.getValue().key()); assertEquals(topic, captor.getValue().topic()); } @SuppressWarnings({"unchecked", "rawtypes"}) protected void verifySendMessage(String topic, String messageKey) { ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class); Mockito.verify(producer.getKafkaProducer()).send(captor.capture()); assertEquals(messageKey, captor.getValue().key()); assertEquals(topic, captor.getValue().topic()); } @SuppressWarnings({"unchecked", "rawtypes"}) protected void verifySendMessage(String topic) { ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class); Mockito.verify(producer.getKafkaProducer()).send(captor.capture()); assertEquals(topic, captor.getValue().topic()); } }
components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerTest.java
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.kafka; import java.util.Properties; import java.util.concurrent.Future; import org.apache.camel.AsyncCallback; import org.apache.camel.CamelException; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.impl.DefaultMessage; import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.errors.ApiException; import org.junit.Test; import org.mockito.ArgumentCaptor; import org.mockito.Matchers; import org.mockito.Mockito; import static org.junit.Assert.assertEquals; public class KafkaProducerTest { private KafkaProducer producer; private KafkaEndpoint endpoint; private Exchange exchange = Mockito.mock(Exchange.class); private Message in = new DefaultMessage(); private AsyncCallback callback = Mockito.mock(AsyncCallback.class); @SuppressWarnings({"unchecked"}) public KafkaProducerTest() throws Exception { endpoint = new KafkaEndpoint( "kafka:broker1:1234,broker2:4567?topic=sometopic", null); endpoint.setBrokers("broker1:1234,broker2:4567"); producer = new KafkaProducer(endpoint); RecordMetadata rm = new RecordMetadata(null, 1, 1); Future future = Mockito.mock(Future.class); Mockito.when(future.get()).thenReturn(rm); org.apache.kafka.clients.producer.KafkaProducer kp = Mockito.mock(org.apache.kafka.clients.producer.KafkaProducer.class); Mockito.when(kp.send(Mockito.any())).thenReturn(future); producer.setKafkaProducer(kp); } @Test public void testPropertyBuilder() throws Exception { Properties props = producer.getProps(); assertEquals("broker1:1234,broker2:4567", props.getProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG)); } @Test @SuppressWarnings({"unchecked"}) public void processSendsMessage() throws Exception { endpoint.setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); producer.process(exchange); Mockito.verify(producer.getKafkaProducer()).send(Matchers.any(ProducerRecord.class)); } @Test(expected=CamelException.class) @SuppressWarnings({"unchecked"}) public void processSendsMessageWithException() throws Exception { endpoint.setTopic("sometopic"); // setup the exception here org.apache.kafka.clients.producer.KafkaProducer kp = producer.getKafkaProducer(); Mockito.when(kp.send(Mockito.any(ProducerRecord.class))).thenThrow(new ApiException()); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); producer.process(exchange); } @Test public void processAsyncSendsMessage() throws Exception { endpoint.setTopic("sometopic"); 
Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); producer.process(exchange, callback); Mockito.verify(producer.getKafkaProducer()).send(Matchers.any(ProducerRecord.class), Matchers.any(Callback.class)); } @Test public void processAsyncSendsMessageWithException() throws Exception { endpoint.setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); // setup the exception here org.apache.kafka.clients.producer.KafkaProducer kp = producer.getKafkaProducer(); Mockito.when(kp.send(Mockito.any(ProducerRecord.class), Mockito.any(Callback.class))).thenThrow(new ApiException()); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); producer.process(exchange, callback); Mockito.verify(producer.getKafkaProducer()).send(Matchers.any(ProducerRecord.class), Matchers.any(Callback.class)); Mockito.verify(exchange).setException(Matchers.isA(ApiException.class)); Mockito.verify(callback).done(Matchers.eq(true)); } @Test public void processSendsMessageWithTopicHeaderAndNoTopicInEndPoint() throws Exception { endpoint.setTopic(null); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.TOPIC, "anotherTopic"); producer.process(exchange); verifySendMessage("anotherTopic"); } @Test public void processSendsMessageWithTopicHeaderAndEndPoint() throws Exception { endpoint.setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); in.setHeader(KafkaConstants.TOPIC, "anotherTopic"); in.setHeader(KafkaConstants.KEY, "someKey"); producer.process(exchange); verifySendMessage("4", "anotherTopic", "someKey"); } @Test(expected = CamelException.class) public void processRequiresTopicInEndpointOrInHeader() throws Exception { endpoint.setTopic(null); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); producer.process(exchange); } @Test public void processDoesNotRequirePartitionHeader() throws Exception { endpoint.setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); producer.process(exchange); } @Test public void processSendsMesssageWithPartitionKeyHeader() throws Exception { endpoint.setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); in.setHeader(KafkaConstants.KEY, "someKey"); producer.process(exchange); verifySendMessage("4", "someTopic", "someKey"); } @Test public void processSendsMesssageWithMessageKeyHeader() throws Exception { endpoint.setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.KEY, "someKey"); producer.process(exchange); verifySendMessage("someTopic", "someKey"); } @Test public void processSendMessageWithBridgeEndpoint() throws Exception { endpoint.setTopic("someTopic"); endpoint.setBridgeEndpoint(true); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.TOPIC, "anotherTopic"); in.setHeader(KafkaConstants.KEY, "someKey"); in.setHeader(KafkaConstants.PARTITION_KEY, "4"); producer.process(exchange); verifySendMessage("4", "someTopic", "someKey"); } @Test // Message and Topic Name alone public void processSendsMesssageWithMessageTopicName() throws Exception { endpoint.setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); producer.process(exchange); verifySendMessage("someTopic"); } @SuppressWarnings({"unchecked", "rawtypes"}) protected void verifySendMessage(String partitionKey, String topic, String messageKey) { ArgumentCaptor<ProducerRecord> captor = 
ArgumentCaptor.forClass(ProducerRecord.class); Mockito.verify(producer.getKafkaProducer()).send(captor.capture()); assertEquals(new Integer(partitionKey), captor.getValue().partition()); assertEquals(messageKey, captor.getValue().key()); assertEquals(topic, captor.getValue().topic()); } @SuppressWarnings({"unchecked", "rawtypes"}) protected void verifySendMessage(String topic, String messageKey) { ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class); Mockito.verify(producer.getKafkaProducer()).send(captor.capture()); assertEquals(messageKey, captor.getValue().key()); assertEquals(topic, captor.getValue().topic()); } @SuppressWarnings({"unchecked", "rawtypes"}) protected void verifySendMessage(String topic) { ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class); Mockito.verify(producer.getKafkaProducer()).send(captor.capture()); assertEquals(topic, captor.getValue().topic()); } }
CAMEL-9957 Fixed the casting error in KafkaProducerTest
components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerTest.java
CAMEL-9957 Fixed the casting error in KafkaProducerTest
<ide><path>components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerTest.java <ide> Future future = Mockito.mock(Future.class); <ide> Mockito.when(future.get()).thenReturn(rm); <ide> org.apache.kafka.clients.producer.KafkaProducer kp = Mockito.mock(org.apache.kafka.clients.producer.KafkaProducer.class); <del> Mockito.when(kp.send(Mockito.any())).thenReturn(future); <add> Mockito.when(kp.send(Mockito.any(ProducerRecord.class))).thenReturn(future); <ide> <ide> producer.setKafkaProducer(kp); <ide> }
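The diff above replaces the untyped matcher Mockito.any() with the typed Mockito.any(ProducerRecord.class) when stubbing the mocked producer's send(...) call. Below is a minimal, hypothetical sketch (not Camel or Kafka code; Sender, Record and Metadata are invented stand-in types) of the same idea: when the stubbed method is overloaded, a typed matcher makes it unambiguous which overload is being stubbed instead of relying on type inference for the matcher. It only assumes Mockito is on the classpath.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Future;

import org.mockito.Mockito;

public class OverloadStubSketch {

    // Invented stand-ins for ProducerRecord / RecordMetadata.
    static class Record { }
    static class Metadata { }

    // Invented interface with the same overload shape as KafkaProducer.send(...).
    interface Sender {
        Future<Metadata> send(Record record);                    // one-arg overload
        Future<Metadata> send(Record record, Runnable callback); // two-arg overload
    }

    public static void main(String[] args) throws Exception {
        Sender sender = Mockito.mock(Sender.class);
        Future<Metadata> future = CompletableFuture.completedFuture(new Metadata());

        // Typed matcher: clearly stubs the one-argument overload only.
        Mockito.when(sender.send(Mockito.any(Record.class))).thenReturn(future);

        // The stub answers the one-arg call; the two-arg overload stays unstubbed.
        System.out.println(sender.send(new Record()).get());
    }
}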
Java
apache-2.0
cf99caac3ac652db879ab1ec8670873e5c79c0c2
0
andrey-kuznetsov/ignite,afinka77/ignite,avinogradovgg/ignite,amirakhmedov/ignite,ilantukh/ignite,chandresh-pancholi/ignite,DoudTechData/ignite,akuznetsov-gridgain/ignite,vsuslov/incubator-ignite,nizhikov/ignite,alexzaitzev/ignite,vsisko/incubator-ignite,gargvish/ignite,murador/ignite,vsisko/incubator-ignite,SharplEr/ignite,endian675/ignite,ilantukh/ignite,voipp/ignite,dlnufox/ignite,kromulan/ignite,nizhikov/ignite,dream-x/ignite,ascherbakoff/ignite,BiryukovVA/ignite,xtern/ignite,apacheignite/ignite,louishust/incubator-ignite,kromulan/ignite,amirakhmedov/ignite,vadopolski/ignite,murador/ignite,abhishek-ch/incubator-ignite,SharplEr/ignite,shroman/ignite,BiryukovVA/ignite,ptupitsyn/ignite,vladisav/ignite,nizhikov/ignite,vladisav/ignite,endian675/ignite,BiryukovVA/ignite,gargvish/ignite,tkpanther/ignite,shroman/ignite,NSAmelchev/ignite,pperalta/ignite,vladisav/ignite,SharplEr/ignite,zzcclp/ignite,sk0x50/ignite,mcherkasov/ignite,irudyak/ignite,ptupitsyn/ignite,abhishek-ch/incubator-ignite,apache/ignite,agura/incubator-ignite,StalkXT/ignite,abhishek-ch/incubator-ignite,alexzaitzev/ignite,sylentprayer/ignite,samaitra/ignite,ashutakGG/incubator-ignite,NSAmelchev/ignite,iveselovskiy/ignite,psadusumilli/ignite,rfqu/ignite,sk0x50/ignite,VladimirErshov/ignite,dream-x/ignite,mcherkasov/ignite,NSAmelchev/ignite,afinka77/ignite,kromulan/ignite,DoudTechData/ignite,sk0x50/ignite,kidaa/incubator-ignite,zzcclp/ignite,chandresh-pancholi/ignite,StalkXT/ignite,chandresh-pancholi/ignite,shurun19851206/ignite,dlnufox/ignite,apache/ignite,tkpanther/ignite,a1vanov/ignite,vsisko/incubator-ignite,WilliamDo/ignite,agura/incubator-ignite,endian675/ignite,thuTom/ignite,louishust/incubator-ignite,tkpanther/ignite,SharplEr/ignite,murador/ignite,VladimirErshov/ignite,samaitra/ignite,avinogradovgg/ignite,ptupitsyn/ignite,shroman/ignite,vsisko/incubator-ignite,wmz7year/ignite,vsuslov/incubator-ignite,daradurvs/ignite,xtern/ignite,psadusumilli/ignite,psadusumilli/ignite,amirakhmedov/ignite,andrey-kuznetsov/ignite,endian675/ignite,louishust/incubator-ignite,dmagda/incubator-ignite,leveyj/ignite,mcherkasov/ignite,irudyak/ignite,nizhikov/ignite,avinogradovgg/ignite,endian675/ignite,shurun19851206/ignite,amirakhmedov/ignite,rfqu/ignite,sk0x50/ignite,SomeFire/ignite,NSAmelchev/ignite,gridgain/apache-ignite,alexzaitzev/ignite,murador/ignite,agoncharuk/ignite,shurun19851206/ignite,sylentprayer/ignite,louishust/incubator-ignite,WilliamDo/ignite,svladykin/ignite,nizhikov/ignite,sylentprayer/ignite,nivanov/ignite,apache/ignite,WilliamDo/ignite,shroman/ignite,vadopolski/ignite,daradurvs/ignite,rfqu/ignite,shroman/ignite,ryanzz/ignite,avinogradovgg/ignite,gargvish/ignite,mcherkasov/ignite,irudyak/ignite,xtern/ignite,thuTom/ignite,voipp/ignite,nivanov/ignite,vadopolski/ignite,vldpyatkov/ignite,svladykin/ignite,NSAmelchev/ignite,dmagda/incubator-ignite,ptupitsyn/ignite,vldpyatkov/ignite,ilantukh/ignite,adeelmahmood/ignite,ptupitsyn/ignite,vsuslov/incubator-ignite,dlnufox/ignite,ptupitsyn/ignite,nivanov/ignite,voipp/ignite,iveselovskiy/ignite,afinka77/ignite,kidaa/incubator-ignite,shurun19851206/ignite,shurun19851206/ignite,agoncharuk/ignite,sylentprayer/ignite,dmagda/incubator-ignite,a1vanov/ignite,mcherkasov/ignite,dream-x/ignite,ntikhonov/ignite,thuTom/ignite,kidaa/incubator-ignite,andrey-kuznetsov/ignite,vsuslov/incubator-ignite,wmz7year/ignite,wmz7year/ignite,adeelmahmood/ignite,abhishek-ch/incubator-ignite,nivanov/ignite,VladimirErshov/ignite,psadusumilli/ignite,ntikhonov/ignite,akuznetsov-gridgain/ignite,endian675/ignite,dmagda/incuba
tor-ignite,alexzaitzev/ignite,vldpyatkov/ignite,NSAmelchev/ignite,vadopolski/ignite,ascherbakoff/ignite,kidaa/incubator-ignite,zzcclp/ignite,voipp/ignite,ryanzz/ignite,andrey-kuznetsov/ignite,agoncharuk/ignite,ilantukh/ignite,apache/ignite,amirakhmedov/ignite,dream-x/ignite,chandresh-pancholi/ignite,irudyak/ignite,NSAmelchev/ignite,dlnufox/ignite,iveselovskiy/ignite,ashutakGG/incubator-ignite,ashutakGG/incubator-ignite,murador/ignite,WilliamDo/ignite,leveyj/ignite,thuTom/ignite,leveyj/ignite,psadusumilli/ignite,WilliamDo/ignite,ascherbakoff/ignite,StalkXT/ignite,ilantukh/ignite,daradurvs/ignite,f7753/ignite,samaitra/ignite,sk0x50/ignite,DoudTechData/ignite,agoncharuk/ignite,a1vanov/ignite,zzcclp/ignite,shroman/ignite,tkpanther/ignite,vadopolski/ignite,irudyak/ignite,f7753/ignite,wmz7year/ignite,akuznetsov-gridgain/ignite,VladimirErshov/ignite,zzcclp/ignite,gargvish/ignite,xtern/ignite,sk0x50/ignite,DoudTechData/ignite,leveyj/ignite,irudyak/ignite,abhishek-ch/incubator-ignite,sylentprayer/ignite,ntikhonov/ignite,psadusumilli/ignite,a1vanov/ignite,gridgain/apache-ignite,ptupitsyn/ignite,andrey-kuznetsov/ignite,avinogradovgg/ignite,tkpanther/ignite,ryanzz/ignite,BiryukovVA/ignite,vldpyatkov/ignite,tkpanther/ignite,daradurvs/ignite,pperalta/ignite,pperalta/ignite,vldpyatkov/ignite,zzcclp/ignite,VladimirErshov/ignite,murador/ignite,voipp/ignite,samaitra/ignite,samaitra/ignite,shroman/ignite,wmz7year/ignite,adeelmahmood/ignite,andrey-kuznetsov/ignite,ilantukh/ignite,ilantukh/ignite,andrey-kuznetsov/ignite,dream-x/ignite,SomeFire/ignite,ryanzz/ignite,SomeFire/ignite,apacheignite/ignite,ascherbakoff/ignite,ashutakGG/incubator-ignite,SharplEr/ignite,kidaa/incubator-ignite,StalkXT/ignite,shroman/ignite,apache/ignite,sylentprayer/ignite,thuTom/ignite,daradurvs/ignite,agoncharuk/ignite,vldpyatkov/ignite,agura/incubator-ignite,chandresh-pancholi/ignite,zzcclp/ignite,afinka77/ignite,rfqu/ignite,a1vanov/ignite,dlnufox/ignite,agura/incubator-ignite,irudyak/ignite,daradurvs/ignite,samaitra/ignite,DoudTechData/ignite,wmz7year/ignite,dmagda/incubator-ignite,BiryukovVA/ignite,BiryukovVA/ignite,dlnufox/ignite,pperalta/ignite,rfqu/ignite,ascherbakoff/ignite,samaitra/ignite,ascherbakoff/ignite,pperalta/ignite,DoudTechData/ignite,shurun19851206/ignite,pperalta/ignite,iveselovskiy/ignite,daradurvs/ignite,daradurvs/ignite,a1vanov/ignite,kromulan/ignite,kromulan/ignite,ptupitsyn/ignite,voipp/ignite,vldpyatkov/ignite,andrey-kuznetsov/ignite,SomeFire/ignite,leveyj/ignite,VladimirErshov/ignite,a1vanov/ignite,agura/incubator-ignite,dlnufox/ignite,SomeFire/ignite,VladimirErshov/ignite,ntikhonov/ignite,endian675/ignite,mcherkasov/ignite,tkpanther/ignite,daradurvs/ignite,mcherkasov/ignite,louishust/incubator-ignite,shroman/ignite,sylentprayer/ignite,iveselovskiy/ignite,adeelmahmood/ignite,f7753/ignite,vladisav/ignite,nizhikov/ignite,wmz7year/ignite,louishust/incubator-ignite,gridgain/apache-ignite,gargvish/ignite,agura/incubator-ignite,leveyj/ignite,f7753/ignite,StalkXT/ignite,akuznetsov-gridgain/ignite,nizhikov/ignite,irudyak/ignite,agoncharuk/ignite,kromulan/ignite,rfqu/ignite,ptupitsyn/ignite,arijitt/incubator-ignite,gargvish/ignite,arijitt/incubator-ignite,NSAmelchev/ignite,svladykin/ignite,SomeFire/ignite,nivanov/ignite,f7753/ignite,NSAmelchev/ignite,afinka77/ignite,BiryukovVA/ignite,murador/ignite,dream-x/ignite,alexzaitzev/ignite,dlnufox/ignite,daradurvs/ignite,ashutakGG/incubator-ignite,StalkXT/ignite,vladisav/ignite,ryanzz/ignite,kidaa/incubator-ignite,wmz7year/ignite,agura/incubator-ignite,voipp/ignite,zzcclp/igni
te,vladisav/ignite,dmagda/incubator-ignite,ryanzz/ignite,vadopolski/ignite,gargvish/ignite,leveyj/ignite,gridgain/apache-ignite,f7753/ignite,afinka77/ignite,nizhikov/ignite,arijitt/incubator-ignite,DoudTechData/ignite,WilliamDo/ignite,svladykin/ignite,xtern/ignite,andrey-kuznetsov/ignite,gridgain/apache-ignite,SharplEr/ignite,nivanov/ignite,ntikhonov/ignite,sk0x50/ignite,ascherbakoff/ignite,svladykin/ignite,rfqu/ignite,BiryukovVA/ignite,chandresh-pancholi/ignite,nizhikov/ignite,avinogradovgg/ignite,agoncharuk/ignite,f7753/ignite,apacheignite/ignite,gridgain/apache-ignite,SomeFire/ignite,iveselovskiy/ignite,SharplEr/ignite,leveyj/ignite,apacheignite/ignite,xtern/ignite,adeelmahmood/ignite,akuznetsov-gridgain/ignite,avinogradovgg/ignite,agoncharuk/ignite,alexzaitzev/ignite,vsuslov/incubator-ignite,DoudTechData/ignite,apacheignite/ignite,vsisko/incubator-ignite,sk0x50/ignite,irudyak/ignite,BiryukovVA/ignite,voipp/ignite,a1vanov/ignite,psadusumilli/ignite,alexzaitzev/ignite,SomeFire/ignite,rfqu/ignite,SharplEr/ignite,adeelmahmood/ignite,dmagda/incubator-ignite,afinka77/ignite,vladisav/ignite,dmagda/incubator-ignite,StalkXT/ignite,alexzaitzev/ignite,WilliamDo/ignite,endian675/ignite,sk0x50/ignite,sylentprayer/ignite,ascherbakoff/ignite,svladykin/ignite,adeelmahmood/ignite,voipp/ignite,ascherbakoff/ignite,kromulan/ignite,apacheignite/ignite,StalkXT/ignite,shurun19851206/ignite,vadopolski/ignite,SomeFire/ignite,ilantukh/ignite,afinka77/ignite,dream-x/ignite,pperalta/ignite,kromulan/ignite,apache/ignite,vsisko/incubator-ignite,StalkXT/ignite,gargvish/ignite,gridgain/apache-ignite,f7753/ignite,vsuslov/incubator-ignite,mcherkasov/ignite,vladisav/ignite,arijitt/incubator-ignite,apacheignite/ignite,chandresh-pancholi/ignite,nivanov/ignite,SomeFire/ignite,dream-x/ignite,xtern/ignite,ilantukh/ignite,xtern/ignite,vsisko/incubator-ignite,amirakhmedov/ignite,ryanzz/ignite,adeelmahmood/ignite,andrey-kuznetsov/ignite,ashutakGG/incubator-ignite,alexzaitzev/ignite,apacheignite/ignite,murador/ignite,ptupitsyn/ignite,svladykin/ignite,apache/ignite,amirakhmedov/ignite,ntikhonov/ignite,amirakhmedov/ignite,thuTom/ignite,chandresh-pancholi/ignite,ntikhonov/ignite,chandresh-pancholi/ignite,SharplEr/ignite,nivanov/ignite,ryanzz/ignite,ilantukh/ignite,thuTom/ignite,arijitt/incubator-ignite,WilliamDo/ignite,samaitra/ignite,tkpanther/ignite,vsisko/incubator-ignite,ntikhonov/ignite,agura/incubator-ignite,vadopolski/ignite,apache/ignite,pperalta/ignite,amirakhmedov/ignite,shroman/ignite,samaitra/ignite,BiryukovVA/ignite,arijitt/incubator-ignite,VladimirErshov/ignite,samaitra/ignite,abhishek-ch/incubator-ignite,apache/ignite,xtern/ignite,psadusumilli/ignite,vldpyatkov/ignite,shurun19851206/ignite,thuTom/ignite,akuznetsov-gridgain/ignite
/* @java.file.header */ /* _________ _____ __________________ _____ * __ ____/___________(_)______ /__ ____/______ ____(_)_______ * _ / __ __ ___/__ / _ __ / _ / __ _ __ `/__ / __ __ \ * / /_/ / _ / _ / / /_/ / / /_/ / / /_/ / _ / _ / / / * \____/ /_/ /_/ \_,__/ \____/ \__,_/ /_/ /_/ /_/ */ package org.gridgain.grid.kernal.processors.cache; import org.apache.ignite.*; import org.apache.ignite.configuration.*; import org.apache.ignite.lang.*; import org.apache.ignite.marshaller.optimized.*; import org.apache.ignite.spi.discovery.tcp.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*; import org.apache.ignite.spi.swapspace.file.*; import org.gridgain.grid.cache.*; import org.gridgain.grid.cache.eviction.lru.*; import org.gridgain.grid.cache.query.*; import org.gridgain.grid.kernal.*; import org.gridgain.grid.kernal.processors.cache.query.*; import org.gridgain.grid.kernal.processors.query.*; import org.gridgain.grid.kernal.processors.query.h2.*; import org.gridgain.grid.util.typedef.*; import org.gridgain.testframework.junits.common.*; import org.jetbrains.annotations.*; import java.io.*; import java.util.*; import java.util.concurrent.atomic.*; import static org.gridgain.grid.cache.GridCacheAtomicityMode.*; import static org.gridgain.grid.cache.GridCacheMode.*; /** * Multi-threaded tests for cache queries. */ @SuppressWarnings("StatementWithEmptyBody") public class GridCacheQueryMultiThreadedSelfTest extends GridCommonAbstractTest { /** */ private static final boolean TEST_INFO = true; /** Number of test grids (nodes). Should not be less than 2. */ private static final int GRID_CNT = 2; /** */ private static TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true); /** */ private static AtomicInteger idxSwapCnt = new AtomicInteger(); /** */ private static AtomicInteger idxUnswapCnt = new AtomicInteger(); /** */ private static final long DURATION = 30 * 1000; /** Don't start grid by default. */ public GridCacheQueryMultiThreadedSelfTest() { super(false); } /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(gridName); TcpDiscoverySpi disco = new TcpDiscoverySpi(); disco.setIpFinder(ipFinder); cfg.setDiscoverySpi(disco); cfg.setSwapSpaceSpi(new FileSwapSpaceSpi()); cfg.setMarshaller(new IgniteOptimizedMarshaller(false)); GridCacheConfiguration cacheCfg = defaultCacheConfiguration(); cacheCfg.setCacheMode(PARTITIONED); cacheCfg.setAtomicityMode(TRANSACTIONAL); cacheCfg.setDistributionMode(GridCacheDistributionMode.NEAR_PARTITIONED); cacheCfg.setWriteSynchronizationMode(GridCacheWriteSynchronizationMode.FULL_SYNC); cacheCfg.setSwapEnabled(true); cacheCfg.setBackups(1); cacheCfg.setEvictionPolicy(evictsEnabled() ? new GridCacheLruEvictionPolicy(100) : null); GridCacheQueryConfiguration qcfg = new GridCacheQueryConfiguration(); qcfg.setIndexPrimitiveKey(true); cacheCfg.setQueryConfiguration(qcfg); if (offheapEnabled() && evictsEnabled()) cacheCfg.setOffHeapMaxMemory(1000); // Small offheap for evictions. 
cfg.setCacheConfiguration(cacheCfg); GridQueryConfiguration indexing = new GridQueryConfiguration(); indexing.setMaxOffheapRowsCacheSize(128); if (offheapEnabled()) indexing.setMaxOffHeapMemory(0); cfg.setQueryConfiguration(indexing); GridQueryProcessor.idxCls = FakeIndexing.class; return cfg; } /** * */ private static class FakeIndexing extends GridH2Indexing { @Override public void onSwap(@Nullable String spaceName, Object key) throws IgniteCheckedException { super.onSwap(spaceName, key); idxSwapCnt.incrementAndGet(); } @Override public void onUnswap(@Nullable String spaceName, Object key, Object val, byte[] valBytes) throws IgniteCheckedException { super.onUnswap(spaceName, key, val, valBytes); idxUnswapCnt.incrementAndGet(); } } /** @return {@code true} If offheap enabled. */ protected boolean offheapEnabled() { return false; } /** @return {@code true} If evictions enabled. */ protected boolean evictsEnabled() { return true; } /** {@inheritDoc} */ @Override protected void beforeTest() throws Exception { super.beforeTest(); // Clean up all caches. for (int i = 0; i < GRID_CNT; i++) { GridCache<Object, Object> c = grid(i).cache(null); assertEquals(0, c.size()); } } /** {@inheritDoc} */ @Override protected void beforeTestsStarted() throws Exception { assert GRID_CNT >= 2 : "Constant GRID_CNT must be greater than or equal to 2."; startGridsMultiThreaded(GRID_CNT); } /** {@inheritDoc} */ @Override protected void afterTestsStopped() throws Exception { stopAllGrids(); if (evictsEnabled()) { assertTrue(idxSwapCnt.get() > 0); assertTrue(idxUnswapCnt.get() > 0); } } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { super.afterTest(); // Clean up all caches. for (int i = 0; i < GRID_CNT; i++) { GridCache<Object, Object> c = grid(i).cache(null); c.removeAll(F.<GridCacheEntry<Object, Object>>alwaysTrue()); Iterator<Map.Entry<Object, Object>> it = c.swapIterator(); while (it.hasNext()) { it.next(); it.remove(); } it = c.offHeapIterator(); while (it.hasNext()) { it.next(); it.remove(); } assertEquals("Swap keys: " + c.swapKeys(), 0, c.swapKeys()); assertEquals(0, c.offHeapEntriesCount()); assertEquals(0, c.size()); } } /** {@inheritDoc} */ @Override protected void info(String msg) { if (TEST_INFO) super.info(msg); } /** * @param entries Entries. * @param g Grid. * @return Affinity nodes. */ private Set<UUID> affinityNodes(Iterable<Map.Entry<Integer, Integer>> entries, Ignite g) { Set<UUID> nodes = new HashSet<>(); for (Map.Entry<Integer, Integer> entry : entries) nodes.add(g.cache(null).affinity().mapKeyToPrimaryAndBackups(entry.getKey()).iterator().next().id()); return nodes; } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void testMultiThreadedSwapUnswapString() throws Exception { int threadCnt = 150; final int keyCnt = 2000; final int valCnt = 10000; final Ignite g = grid(0); // Put test values into cache. 
final GridCache<Integer, String> c = g.cache(null); assertEquals(0, g.cache(null).size()); assertEquals(0, c.queries().createSqlQuery(String.class, "1 = 1").execute().get().size()); assertEquals(0, c.queries().createSqlQuery(Long.class, "1 = 1").execute().get().size()); Random rnd = new Random(); for (int i = 0; i < keyCnt; i += 1 + rnd.nextInt(3)) { c.putx(i, String.valueOf(rnd.nextInt(valCnt))); if (evictsEnabled() && rnd.nextBoolean()) assertTrue(c.evict(i)); } final AtomicBoolean done = new AtomicBoolean(); IgniteFuture<?> fut = multithreadedAsync(new CAX() { @Override public void applyx() throws IgniteCheckedException { Random rnd = new Random(); while (!done.get()) { switch (rnd.nextInt(5)) { case 0: c.putx(rnd.nextInt(keyCnt), String.valueOf(rnd.nextInt(valCnt))); break; case 1: if (evictsEnabled()) c.evict(rnd.nextInt(keyCnt)); break; case 2: c.remove(rnd.nextInt(keyCnt)); break; case 3: c.get(rnd.nextInt(keyCnt)); break; case 4: GridCacheQuery<Map.Entry<Integer, String>> qry = c.queries().createSqlQuery( String.class, "_val between ? and ?"); int from = rnd.nextInt(valCnt); GridCacheQueryFuture<Map.Entry<Integer, String>> fut = qry.execute(String.valueOf(from), String.valueOf(from + 250)); Collection<Map.Entry<Integer, String>> res = fut.get(); for (Map.Entry<Integer, String> ignored : res) { //No-op. } } } } }, threadCnt); Thread.sleep(DURATION); done.set(true); fut.get(); } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void testMultiThreadedSwapUnswapLong() throws Exception { int threadCnt = 150; final int keyCnt = 2000; final int valCnt = 10000; final Ignite g = grid(0); // Put test values into cache. final GridCache<Integer, Long> c = g.cache(null); assertEquals(0, g.cache(null).size()); assertEquals(0, c.queries().createSqlQuery(String.class, "1 = 1").execute().get().size()); assertEquals(0, c.queries().createSqlQuery(Long.class, "1 = 1").execute().get().size()); Random rnd = new Random(); for (int i = 0; i < keyCnt; i += 1 + rnd.nextInt(3)) { c.putx(i, (long)rnd.nextInt(valCnt)); if (evictsEnabled() && rnd.nextBoolean()) assertTrue(c.evict(i)); } final AtomicBoolean done = new AtomicBoolean(); IgniteFuture<?> fut = multithreadedAsync(new CAX() { @Override public void applyx() throws IgniteCheckedException { Random rnd = new Random(); while (!done.get()) { int key = rnd.nextInt(keyCnt); switch (rnd.nextInt(5)) { case 0: c.putx(key, (long)rnd.nextInt(valCnt)); break; case 1: if (evictsEnabled()) c.evict(key); break; case 2: c.remove(key); break; case 3: c.get(key); break; case 4: GridCacheQuery<Map.Entry<Integer, Long>> qry = c.queries().createSqlQuery( Long.class, "_val between ? and ?"); int from = rnd.nextInt(valCnt); GridCacheQueryFuture<Map.Entry<Integer, Long>> f = qry.execute(from, from + 250); Collection<Map.Entry<Integer, Long>> res = f.get(); for (Map.Entry<Integer, Long> ignored : res) { //No-op. } } } } }, threadCnt); Thread.sleep(DURATION); done.set(true); fut.get(); } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void testMultiThreadedSwapUnswapLongString() throws Exception { int threadCnt = 150; final int keyCnt = 2000; final int valCnt = 10000; final Ignite g = grid(0); // Put test values into cache. 
final GridCache<Integer, Object> c = g.cache(null); assertEquals(0, g.cache(null).size()); assertEquals(0, c.offHeapEntriesCount()); // assertEquals(0, c.swapKeys()); assertEquals(0, c.queries().createSqlQuery(String.class, "1 = 1").execute().get().size()); assertEquals(0, c.queries().createSqlQuery(Long.class, "1 = 1").execute().get().size()); Random rnd = new Random(); for (int i = 0; i < keyCnt; i += 1 + rnd.nextInt(3)) { c.putx(i, rnd.nextBoolean() ? (long)rnd.nextInt(valCnt) : String.valueOf(rnd.nextInt(valCnt))); if (evictsEnabled() && rnd.nextBoolean()) assertTrue(c.evict(i)); } final AtomicBoolean done = new AtomicBoolean(); IgniteFuture<?> fut = multithreadedAsync(new CAX() { @Override public void applyx() throws IgniteCheckedException { Random rnd = new Random(); while (!done.get()) { int key = rnd.nextInt(keyCnt); switch (rnd.nextInt(5)) { case 0: c.putx(key, rnd.nextBoolean() ? (long)rnd.nextInt(valCnt) : String.valueOf(rnd.nextInt(valCnt))); break; case 1: if (evictsEnabled()) c.evict(key); break; case 2: c.remove(key); break; case 3: c.get(key); break; case 4: GridCacheQuery<Map.Entry<Integer, Object>> qry = c.queries().createSqlQuery( rnd.nextBoolean() ? Long.class : String.class, "_val between ? and ?"); int from = rnd.nextInt(valCnt); GridCacheQueryFuture<Map.Entry<Integer, Object>> f = qry.execute(from, from + 250); Collection<Map.Entry<Integer, Object>> res = f.get(); for (Map.Entry<Integer, Object> ignored : res) { //No-op. } } } } }, threadCnt); Thread.sleep(DURATION); done.set(true); fut.get(); } /** * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void testMultiThreadedSwapUnswapObject() throws Exception { int threadCnt = 50; final int keyCnt = 4000; final int valCnt = 10000; final Ignite g = grid(0); // Put test values into cache. final GridCache<Integer, TestValue> c = g.cache(null); assertEquals(0, g.cache(null).size()); assertEquals(0, c.queries().createSqlQuery(String.class, "1 = 1").execute().get().size()); assertEquals(0, c.queries().createSqlQuery(Long.class, "1 = 1").execute().get().size()); Random rnd = new Random(); for (int i = 0; i < keyCnt; i += 1 + rnd.nextInt(3)) { c.putx(i, new TestValue(rnd.nextInt(valCnt))); if (evictsEnabled() && rnd.nextBoolean()) assertTrue(c.evict(i)); } final AtomicBoolean done = new AtomicBoolean(); IgniteFuture<?> fut = multithreadedAsync(new CAX() { @Override public void applyx() throws IgniteCheckedException { Random rnd = new Random(); while (!done.get()) { int key = rnd.nextInt(keyCnt); switch (rnd.nextInt(5)) { case 0: c.putx(key, new TestValue(rnd.nextInt(valCnt))); break; case 1: if (evictsEnabled()) c.evict(key); break; case 2: c.remove(key); break; case 3: c.get(key); break; case 4: GridCacheQuery<Map.Entry<Integer, TestValue>> qry = c.queries().createSqlQuery( Long.class, "TestValue.val between ? and ?"); int from = rnd.nextInt(valCnt); GridCacheQueryFuture<Map.Entry<Integer, TestValue>> f = qry.execute(from, from + 250); Collection<Map.Entry<Integer, TestValue>> res = f.get(); for (Map.Entry<Integer, TestValue> ignored : res) { //No-op. } } } } }, threadCnt); Thread.sleep(DURATION); done.set(true); fut.get(); } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void testMultiThreadedSameQuery() throws Exception { int threadCnt = 50; final int keyCnt = 10; final int logMod = 5000; final Ignite g = grid(0); // Put test values into cache. 
GridCache<Integer, Integer> c = g.cache(null); for (int i = 0; i < keyCnt; i++) { c.putx(i, i); info("Affinity [key=" + i + ", aff=" + c.affinity().mapKeyToPrimaryAndBackups(i).iterator().next().id() + ']'); assertTrue(c.evict(i)); } final AtomicInteger cnt = new AtomicInteger(); final AtomicBoolean done = new AtomicBoolean(); final GridCacheQuery<Map.Entry<Integer, Integer>> qry = c.queries().createSqlQuery(Integer.class, "_val >= 0"); IgniteFuture<?> fut = multithreadedAsync( new CAX() { @Override public void applyx() throws IgniteCheckedException { int iter = 0; while (!done.get() && !Thread.currentThread().isInterrupted()) { iter++; GridCacheQueryFuture<Map.Entry<Integer, Integer>> fut = qry.execute(); Collection<Map.Entry<Integer, Integer>> entries = fut.get(); assert entries != null; assertEquals("Query results [entries=" + entries + ", aff=" + affinityNodes(entries, g) + ", iteration=" + iter + ']', keyCnt, entries.size()); if (cnt.incrementAndGet() % logMod == 0) { GridCacheQueryManager<Object, Object> qryMgr = ((GridKernal)g).internalCache().context().queries(); assert qryMgr != null; qryMgr.printMemoryStats(); } } } }, threadCnt); Thread.sleep(DURATION); info("Finishing test..."); done.set(true); fut.get(); } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void testMultiThreadedNewQueries() throws Exception { int threadCnt = 50; final int keyCnt = 10; final int logMod = 5000; final Ignite g = grid(0); // Put test values into cache. final GridCache<Integer, Integer> c = g.cache(null); for (int i = 0; i < keyCnt; i++) { c.putx(i, i); assertTrue(c.evict(i)); } final AtomicInteger cnt = new AtomicInteger(); final AtomicBoolean done = new AtomicBoolean(); IgniteFuture<?> fut = multithreadedAsync(new CAX() { @Override public void applyx() throws IgniteCheckedException { int iter = 0; while (!done.get() && !Thread.currentThread().isInterrupted()) { iter++; GridCacheQuery<Map.Entry<Integer, Integer>> qry = c.queries().createSqlQuery(Integer.class, "_val >= 0"); GridCacheQueryFuture<Map.Entry<Integer, Integer>> fut = qry.execute(); Collection<Map.Entry<Integer, Integer>> entries = fut.get(); assert entries != null; assertEquals("Entries count is not as expected on iteration: " + iter, keyCnt, entries.size()); if (cnt.incrementAndGet() % logMod == 0) { GridCacheQueryManager<Object, Object> qryMgr = ((GridKernal)g).internalCache().context().queries(); assert qryMgr != null; qryMgr.printMemoryStats(); } } } }, threadCnt); Thread.sleep(DURATION); done.set(true); fut.get(); } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void testMultiThreadedReduceQuery() throws Exception { int threadCnt = 50; int keyCnt = 10; final int logMod = 5000; final Ignite g = grid(0); // Put test values into cache. GridCache<Integer, Integer> c = g.cache(null); for (int i = 0; i < keyCnt; i++) c.putx(i, i); final GridCacheQuery<Map.Entry<Integer, Integer>> rdcQry = c.queries().createSqlQuery(Integer.class, "_val > 1 and _val < 4"); rdcQry.includeBackups(true); rdcQry.keepAll(true); final IgniteReducer<Map.Entry<Integer, Integer>, Integer> rmtRdc = new IgniteReducer<Map.Entry<Integer, Integer>, Integer>() { /** Reducer result. 
*/ private int res; @Override public boolean collect(Map.Entry<Integer, Integer> e) { res += e.getKey(); return true; } @Override public Integer reduce() { return res; } }; final AtomicInteger cnt = new AtomicInteger(); final AtomicBoolean stop = new AtomicBoolean(); IgniteFuture<?> fut = multithreadedAsync(new CAX() { @Override public void applyx() throws IgniteCheckedException { while (!stop.get()) { Collection<Integer> rmtVals = rdcQry.execute(rmtRdc).get(); assertEquals(GRID_CNT, rmtVals.size()); Iterator<Integer> reduceIter = rmtVals.iterator(); assert reduceIter != null; for (int i = 0; i < GRID_CNT; i++) { assert reduceIter.hasNext(); assertEquals(Integer.valueOf(5), reduceIter.next()); } Collection<Integer> res = rdcQry.execute(rmtRdc).get(); int val = F.sumInt(res); int expVal = 5 * GRID_CNT; assertEquals(expVal, val); if (cnt.incrementAndGet() % logMod == 0) { GridCacheQueryManager<Object, Object> qryMgr = ((GridKernal)g).internalCache().context().queries(); assert qryMgr != null; qryMgr.printMemoryStats(); } } } }, threadCnt); Thread.sleep(DURATION); stop.set(true); fut.get(); } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void testMultiThreadedScanQuery() throws Exception { int threadCnt = 50; final int keyCnt = 500; final int logMod = 5000; final Ignite g = grid(0); // Put test values into cache. GridCache<Integer, Integer> c = g.cache(null); for (int i = 0; i < keyCnt; i++) c.putx(i, i); final AtomicInteger cnt = new AtomicInteger(); final AtomicBoolean done = new AtomicBoolean(); final GridCacheQuery<Map.Entry<Integer, Integer>> qry = c.queries().createScanQuery(null); IgniteFuture<?> fut = multithreadedAsync( new CAX() { @Override public void applyx() throws IgniteCheckedException { int iter = 0; while (!done.get() && !Thread.currentThread().isInterrupted()) { iter++; GridCacheQueryFuture<Map.Entry<Integer, Integer>> fut = qry.execute(); Collection<Map.Entry<Integer, Integer>> entries = fut.get(); assert entries != null; assertEquals("Entries count is not as expected on iteration: " + iter, keyCnt, entries.size()); if (cnt.incrementAndGet() % logMod == 0) { GridCacheQueryManager<Object, Object> qryMgr = ((GridKernal)g).internalCache().context().queries(); assert qryMgr != null; qryMgr.printMemoryStats(); } } } }, threadCnt); Thread.sleep(DURATION); done.set(true); fut.get(); } /** * Test value. */ private static class TestValue implements Serializable { /** Value. */ @GridCacheQuerySqlField private int val; /** * @param val Value. */ private TestValue(int val) { this.val = val; } /** * @return Value. */ public int value() { return val; } } }
modules/indexing/src/test/java/org/gridgain/grid/kernal/processors/cache/GridCacheQueryMultiThreadedSelfTest.java
/* @java.file.header */ /* _________ _____ __________________ _____ * __ ____/___________(_)______ /__ ____/______ ____(_)_______ * _ / __ __ ___/__ / _ __ / _ / __ _ __ `/__ / __ __ \ * / /_/ / _ / _ / / /_/ / / /_/ / / /_/ / _ / _ / / / * \____/ /_/ /_/ \_,__/ \____/ \__,_/ /_/ /_/ /_/ */ package org.gridgain.grid.kernal.processors.cache; import org.apache.ignite.*; import org.apache.ignite.configuration.*; import org.apache.ignite.lang.*; import org.apache.ignite.marshaller.optimized.*; import org.apache.ignite.spi.discovery.tcp.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.*; import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*; import org.apache.ignite.spi.swapspace.file.*; import org.gridgain.grid.*; import org.gridgain.grid.cache.*; import org.gridgain.grid.cache.eviction.lru.*; import org.gridgain.grid.cache.query.*; import org.gridgain.grid.kernal.*; import org.gridgain.grid.kernal.processors.cache.query.*; import org.gridgain.grid.kernal.processors.query.*; import org.gridgain.grid.kernal.processors.query.h2.*; import org.gridgain.grid.util.typedef.*; import org.gridgain.testframework.junits.common.*; import org.jetbrains.annotations.*; import java.io.*; import java.util.*; import java.util.concurrent.atomic.*; import static org.gridgain.grid.cache.GridCacheAtomicityMode.*; import static org.gridgain.grid.cache.GridCacheMode.*; /** * Multi-threaded tests for cache queries. */ @SuppressWarnings("StatementWithEmptyBody") public class GridCacheQueryMultiThreadedSelfTest extends GridCommonAbstractTest { /** */ private static final boolean TEST_INFO = true; /** Number of test grids (nodes). Should not be less than 2. */ private static final int GRID_CNT = 2; /** */ private static TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true); /** */ private static AtomicInteger idxSwapCnt = new AtomicInteger(); /** */ private static AtomicInteger idxUnswapCnt = new AtomicInteger(); /** */ private static final long DURATION = 30 * 1000; /** Don't start grid by default. */ public GridCacheQueryMultiThreadedSelfTest() { super(false); } /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(gridName); TcpDiscoverySpi disco = new TcpDiscoverySpi(); disco.setIpFinder(ipFinder); cfg.setDiscoverySpi(disco); cfg.setSwapSpaceSpi(new FileSwapSpaceSpi()); cfg.setMarshaller(new IgniteOptimizedMarshaller(false)); GridCacheConfiguration cacheCfg = defaultCacheConfiguration(); cacheCfg.setCacheMode(PARTITIONED); cacheCfg.setAtomicityMode(TRANSACTIONAL); cacheCfg.setDistributionMode(GridCacheDistributionMode.NEAR_PARTITIONED); cacheCfg.setWriteSynchronizationMode(GridCacheWriteSynchronizationMode.FULL_SYNC); cacheCfg.setSwapEnabled(true); cacheCfg.setBackups(1); cacheCfg.setEvictionPolicy(evictsEnabled() ? new GridCacheLruEvictionPolicy(100) : null); GridCacheQueryConfiguration qcfg = new GridCacheQueryConfiguration(); qcfg.setIndexPrimitiveKey(true); cacheCfg.setQueryConfiguration(qcfg); if (offheapEnabled() && evictsEnabled()) cacheCfg.setOffHeapMaxMemory(1000); // Small offheap for evictions. 
cfg.setCacheConfiguration(cacheCfg); GridQueryConfiguration indexing = new GridQueryConfiguration(); indexing.setMaxOffheapRowsCacheSize(128); if (offheapEnabled()) indexing.setMaxOffHeapMemory(0); cfg.setQueryConfiguration(indexing); GridQueryProcessor.idxCls = FakeIndexing.class; return cfg; } /** * */ private static class FakeIndexing extends GridH2Indexing { @Override public void onSwap(@Nullable String spaceName, Object key) throws IgniteCheckedException { super.onSwap(spaceName, key); idxSwapCnt.incrementAndGet(); } @Override public void onUnswap(@Nullable String spaceName, Object key, Object val, byte[] valBytes) throws IgniteCheckedException { super.onUnswap(spaceName, key, val, valBytes); idxUnswapCnt.incrementAndGet(); } } /** @return {@code true} If offheap enabled. */ protected boolean offheapEnabled() { return false; } /** @return {@code true} If evictions enabled. */ protected boolean evictsEnabled() { return true; } /** {@inheritDoc} */ @Override protected void beforeTest() throws Exception { super.beforeTest(); // Clean up all caches. for (int i = 0; i < GRID_CNT; i++) { GridCache<Object, Object> c = grid(i).cache(null); assertEquals(0, c.size()); } } /** {@inheritDoc} */ @Override protected void beforeTestsStarted() throws Exception { assert GRID_CNT >= 2 : "Constant GRID_CNT must be greater than or equal to 2."; startGridsMultiThreaded(GRID_CNT); } /** {@inheritDoc} */ @Override protected void afterTestsStopped() throws Exception { stopAllGrids(); if (evictsEnabled()) { assertTrue(idxSwapCnt.get() > 0); assertTrue(idxUnswapCnt.get() > 0); } } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { super.afterTest(); // Clean up all caches. for (int i = 0; i < GRID_CNT; i++) { GridCache<Object, Object> c = grid(i).cache(null); c.removeAll(F.<GridCacheEntry<Object, Object>>alwaysTrue()); Iterator<Map.Entry<Object, Object>> it = c.swapIterator(); while (it.hasNext()) { it.next(); it.remove(); } it = c.offHeapIterator(); while (it.hasNext()) { it.next(); it.remove(); } assertEquals("Swap keys: " + c.swapKeys(), 0, c.swapKeys()); assertEquals(0, c.offHeapEntriesCount()); assertEquals(0, c.size()); } } /** {@inheritDoc} */ @Override protected void info(String msg) { if (TEST_INFO) super.info(msg); } /** * @param entries Entries. * @param g Grid. * @return Affinity nodes. */ private Set<UUID> affinityNodes(Iterable<Map.Entry<Integer, Integer>> entries, Ignite g) { Set<UUID> nodes = new HashSet<>(); for (Map.Entry<Integer, Integer> entry : entries) nodes.add(g.cache(null).affinity().mapKeyToPrimaryAndBackups(entry.getKey()).iterator().next().id()); return nodes; } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void _testMultiThreadedSwapUnswapString() throws Exception { // TODO GG-9141 int threadCnt = 150; final int keyCnt = 2000; final int valCnt = 10000; final Ignite g = grid(0); // Put test values into cache. 
final GridCache<Integer, String> c = g.cache(null); assertEquals(0, g.cache(null).size()); assertEquals(0, c.queries().createSqlQuery(String.class, "1 = 1").execute().get().size()); assertEquals(0, c.queries().createSqlQuery(Long.class, "1 = 1").execute().get().size()); Random rnd = new Random(); for (int i = 0; i < keyCnt; i += 1 + rnd.nextInt(3)) { c.putx(i, String.valueOf(rnd.nextInt(valCnt))); if (evictsEnabled() && rnd.nextBoolean()) assertTrue(c.evict(i)); } final AtomicBoolean done = new AtomicBoolean(); IgniteFuture<?> fut = multithreadedAsync(new CAX() { @Override public void applyx() throws IgniteCheckedException { Random rnd = new Random(); while (!done.get()) { switch (rnd.nextInt(5)) { case 0: c.putx(rnd.nextInt(keyCnt), String.valueOf(rnd.nextInt(valCnt))); break; case 1: if (evictsEnabled()) c.evict(rnd.nextInt(keyCnt)); break; case 2: c.remove(rnd.nextInt(keyCnt)); break; case 3: c.get(rnd.nextInt(keyCnt)); break; case 4: GridCacheQuery<Map.Entry<Integer, String>> qry = c.queries().createSqlQuery( String.class, "_val between ? and ?"); int from = rnd.nextInt(valCnt); GridCacheQueryFuture<Map.Entry<Integer, String>> fut = qry.execute(String.valueOf(from), String.valueOf(from + 250)); Collection<Map.Entry<Integer, String>> res = fut.get(); for (Map.Entry<Integer, String> ignored : res) { //No-op. } } } } }, threadCnt); Thread.sleep(DURATION); done.set(true); fut.get(); } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void _testMultiThreadedSwapUnswapLong() throws Exception { // TODO GG-9141 int threadCnt = 150; final int keyCnt = 2000; final int valCnt = 10000; final Ignite g = grid(0); // Put test values into cache. final GridCache<Integer, Long> c = g.cache(null); assertEquals(0, g.cache(null).size()); assertEquals(0, c.queries().createSqlQuery(String.class, "1 = 1").execute().get().size()); assertEquals(0, c.queries().createSqlQuery(Long.class, "1 = 1").execute().get().size()); Random rnd = new Random(); for (int i = 0; i < keyCnt; i += 1 + rnd.nextInt(3)) { c.putx(i, (long)rnd.nextInt(valCnt)); if (evictsEnabled() && rnd.nextBoolean()) assertTrue(c.evict(i)); } final AtomicBoolean done = new AtomicBoolean(); IgniteFuture<?> fut = multithreadedAsync(new CAX() { @Override public void applyx() throws IgniteCheckedException { Random rnd = new Random(); while (!done.get()) { int key = rnd.nextInt(keyCnt); switch (rnd.nextInt(5)) { case 0: c.putx(key, (long)rnd.nextInt(valCnt)); break; case 1: if (evictsEnabled()) c.evict(key); break; case 2: c.remove(key); break; case 3: c.get(key); break; case 4: GridCacheQuery<Map.Entry<Integer, Long>> qry = c.queries().createSqlQuery( Long.class, "_val between ? and ?"); int from = rnd.nextInt(valCnt); GridCacheQueryFuture<Map.Entry<Integer, Long>> f = qry.execute(from, from + 250); Collection<Map.Entry<Integer, Long>> res = f.get(); for (Map.Entry<Integer, Long> ignored : res) { //No-op. } } } } }, threadCnt); Thread.sleep(DURATION); done.set(true); fut.get(); } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void _testMultiThreadedSwapUnswapLongString() throws Exception { // TODO GG-9141 int threadCnt = 150; final int keyCnt = 2000; final int valCnt = 10000; final Ignite g = grid(0); // Put test values into cache. 
final GridCache<Integer, Object> c = g.cache(null); assertEquals(0, g.cache(null).size()); assertEquals(0, c.offHeapEntriesCount()); // assertEquals(0, c.swapKeys()); assertEquals(0, c.queries().createSqlQuery(String.class, "1 = 1").execute().get().size()); assertEquals(0, c.queries().createSqlQuery(Long.class, "1 = 1").execute().get().size()); Random rnd = new Random(); for (int i = 0; i < keyCnt; i += 1 + rnd.nextInt(3)) { c.putx(i, rnd.nextBoolean() ? (long)rnd.nextInt(valCnt) : String.valueOf(rnd.nextInt(valCnt))); if (evictsEnabled() && rnd.nextBoolean()) assertTrue(c.evict(i)); } final AtomicBoolean done = new AtomicBoolean(); IgniteFuture<?> fut = multithreadedAsync(new CAX() { @Override public void applyx() throws IgniteCheckedException { Random rnd = new Random(); while (!done.get()) { int key = rnd.nextInt(keyCnt); switch (rnd.nextInt(5)) { case 0: c.putx(key, rnd.nextBoolean() ? (long)rnd.nextInt(valCnt) : String.valueOf(rnd.nextInt(valCnt))); break; case 1: if (evictsEnabled()) c.evict(key); break; case 2: c.remove(key); break; case 3: c.get(key); break; case 4: GridCacheQuery<Map.Entry<Integer, Object>> qry = c.queries().createSqlQuery( rnd.nextBoolean() ? Long.class : String.class, "_val between ? and ?"); int from = rnd.nextInt(valCnt); GridCacheQueryFuture<Map.Entry<Integer, Object>> f = qry.execute(from, from + 250); Collection<Map.Entry<Integer, Object>> res = f.get(); for (Map.Entry<Integer, Object> ignored : res) { //No-op. } } } } }, threadCnt); Thread.sleep(DURATION); done.set(true); fut.get(); } /** * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void _testMultiThreadedSwapUnswapObject() throws Exception { // TODO GG-9141 int threadCnt = 50; final int keyCnt = 4000; final int valCnt = 10000; final Ignite g = grid(0); // Put test values into cache. final GridCache<Integer, TestValue> c = g.cache(null); assertEquals(0, g.cache(null).size()); assertEquals(0, c.queries().createSqlQuery(String.class, "1 = 1").execute().get().size()); assertEquals(0, c.queries().createSqlQuery(Long.class, "1 = 1").execute().get().size()); Random rnd = new Random(); for (int i = 0; i < keyCnt; i += 1 + rnd.nextInt(3)) { c.putx(i, new TestValue(rnd.nextInt(valCnt))); if (evictsEnabled() && rnd.nextBoolean()) assertTrue(c.evict(i)); } final AtomicBoolean done = new AtomicBoolean(); IgniteFuture<?> fut = multithreadedAsync(new CAX() { @Override public void applyx() throws IgniteCheckedException { Random rnd = new Random(); while (!done.get()) { int key = rnd.nextInt(keyCnt); switch (rnd.nextInt(5)) { case 0: c.putx(key, new TestValue(rnd.nextInt(valCnt))); break; case 1: if (evictsEnabled()) c.evict(key); break; case 2: c.remove(key); break; case 3: c.get(key); break; case 4: GridCacheQuery<Map.Entry<Integer, TestValue>> qry = c.queries().createSqlQuery( Long.class, "TestValue.val between ? and ?"); int from = rnd.nextInt(valCnt); GridCacheQueryFuture<Map.Entry<Integer, TestValue>> f = qry.execute(from, from + 250); Collection<Map.Entry<Integer, TestValue>> res = f.get(); for (Map.Entry<Integer, TestValue> ignored : res) { //No-op. } } } } }, threadCnt); Thread.sleep(DURATION); done.set(true); fut.get(); } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void testMultiThreadedSameQuery() throws Exception { int threadCnt = 50; final int keyCnt = 10; final int logMod = 5000; final Ignite g = grid(0); // Put test values into cache. 
GridCache<Integer, Integer> c = g.cache(null); for (int i = 0; i < keyCnt; i++) { c.putx(i, i); info("Affinity [key=" + i + ", aff=" + c.affinity().mapKeyToPrimaryAndBackups(i).iterator().next().id() + ']'); assertTrue(c.evict(i)); } final AtomicInteger cnt = new AtomicInteger(); final AtomicBoolean done = new AtomicBoolean(); final GridCacheQuery<Map.Entry<Integer, Integer>> qry = c.queries().createSqlQuery(Integer.class, "_val >= 0"); IgniteFuture<?> fut = multithreadedAsync( new CAX() { @Override public void applyx() throws IgniteCheckedException { int iter = 0; while (!done.get() && !Thread.currentThread().isInterrupted()) { iter++; GridCacheQueryFuture<Map.Entry<Integer, Integer>> fut = qry.execute(); Collection<Map.Entry<Integer, Integer>> entries = fut.get(); assert entries != null; assertEquals("Query results [entries=" + entries + ", aff=" + affinityNodes(entries, g) + ", iteration=" + iter + ']', keyCnt, entries.size()); if (cnt.incrementAndGet() % logMod == 0) { GridCacheQueryManager<Object, Object> qryMgr = ((GridKernal)g).internalCache().context().queries(); assert qryMgr != null; qryMgr.printMemoryStats(); } } } }, threadCnt); Thread.sleep(DURATION); info("Finishing test..."); done.set(true); fut.get(); } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void testMultiThreadedNewQueries() throws Exception { int threadCnt = 50; final int keyCnt = 10; final int logMod = 5000; final Ignite g = grid(0); // Put test values into cache. final GridCache<Integer, Integer> c = g.cache(null); for (int i = 0; i < keyCnt; i++) { c.putx(i, i); assertTrue(c.evict(i)); } final AtomicInteger cnt = new AtomicInteger(); final AtomicBoolean done = new AtomicBoolean(); IgniteFuture<?> fut = multithreadedAsync(new CAX() { @Override public void applyx() throws IgniteCheckedException { int iter = 0; while (!done.get() && !Thread.currentThread().isInterrupted()) { iter++; GridCacheQuery<Map.Entry<Integer, Integer>> qry = c.queries().createSqlQuery(Integer.class, "_val >= 0"); GridCacheQueryFuture<Map.Entry<Integer, Integer>> fut = qry.execute(); Collection<Map.Entry<Integer, Integer>> entries = fut.get(); assert entries != null; assertEquals("Entries count is not as expected on iteration: " + iter, keyCnt, entries.size()); if (cnt.incrementAndGet() % logMod == 0) { GridCacheQueryManager<Object, Object> qryMgr = ((GridKernal)g).internalCache().context().queries(); assert qryMgr != null; qryMgr.printMemoryStats(); } } } }, threadCnt); Thread.sleep(DURATION); done.set(true); fut.get(); } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void testMultiThreadedReduceQuery() throws Exception { int threadCnt = 50; int keyCnt = 10; final int logMod = 5000; final Ignite g = grid(0); // Put test values into cache. GridCache<Integer, Integer> c = g.cache(null); for (int i = 0; i < keyCnt; i++) c.putx(i, i); final GridCacheQuery<Map.Entry<Integer, Integer>> rdcQry = c.queries().createSqlQuery(Integer.class, "_val > 1 and _val < 4"); rdcQry.includeBackups(true); rdcQry.keepAll(true); final IgniteReducer<Map.Entry<Integer, Integer>, Integer> rmtRdc = new IgniteReducer<Map.Entry<Integer, Integer>, Integer>() { /** Reducer result. 
*/ private int res; @Override public boolean collect(Map.Entry<Integer, Integer> e) { res += e.getKey(); return true; } @Override public Integer reduce() { return res; } }; final AtomicInteger cnt = new AtomicInteger(); final AtomicBoolean stop = new AtomicBoolean(); IgniteFuture<?> fut = multithreadedAsync(new CAX() { @Override public void applyx() throws IgniteCheckedException { while (!stop.get()) { Collection<Integer> rmtVals = rdcQry.execute(rmtRdc).get(); assertEquals(GRID_CNT, rmtVals.size()); Iterator<Integer> reduceIter = rmtVals.iterator(); assert reduceIter != null; for (int i = 0; i < GRID_CNT; i++) { assert reduceIter.hasNext(); assertEquals(Integer.valueOf(5), reduceIter.next()); } Collection<Integer> res = rdcQry.execute(rmtRdc).get(); int val = F.sumInt(res); int expVal = 5 * GRID_CNT; assertEquals(expVal, val); if (cnt.incrementAndGet() % logMod == 0) { GridCacheQueryManager<Object, Object> qryMgr = ((GridKernal)g).internalCache().context().queries(); assert qryMgr != null; qryMgr.printMemoryStats(); } } } }, threadCnt); Thread.sleep(DURATION); stop.set(true); fut.get(); } /** * JUnit. * * @throws Exception If failed. */ @SuppressWarnings({"TooBroadScope"}) public void testMultiThreadedScanQuery() throws Exception { int threadCnt = 50; final int keyCnt = 500; final int logMod = 5000; final Ignite g = grid(0); // Put test values into cache. GridCache<Integer, Integer> c = g.cache(null); for (int i = 0; i < keyCnt; i++) c.putx(i, i); final AtomicInteger cnt = new AtomicInteger(); final AtomicBoolean done = new AtomicBoolean(); final GridCacheQuery<Map.Entry<Integer, Integer>> qry = c.queries().createScanQuery(null); IgniteFuture<?> fut = multithreadedAsync( new CAX() { @Override public void applyx() throws IgniteCheckedException { int iter = 0; while (!done.get() && !Thread.currentThread().isInterrupted()) { iter++; GridCacheQueryFuture<Map.Entry<Integer, Integer>> fut = qry.execute(); Collection<Map.Entry<Integer, Integer>> entries = fut.get(); assert entries != null; assertEquals("Entries count is not as expected on iteration: " + iter, keyCnt, entries.size()); if (cnt.incrementAndGet() % logMod == 0) { GridCacheQueryManager<Object, Object> qryMgr = ((GridKernal)g).internalCache().context().queries(); assert qryMgr != null; qryMgr.printMemoryStats(); } } } }, threadCnt); Thread.sleep(DURATION); done.set(true); fut.get(); } /** * Test value. */ private static class TestValue implements Serializable { /** Value. */ @GridCacheQuerySqlField private int val; /** * @param val Value. */ private TestValue(int val) { this.val = val; } /** * @return Value. */ public int value() { return val; } } }
GG-9141 - Enabled passing tests.
modules/indexing/src/test/java/org/gridgain/grid/kernal/processors/cache/GridCacheQueryMultiThreadedSelfTest.java
GG-9141 - Enabled passing tests.
<ide><path>odules/indexing/src/test/java/org/gridgain/grid/kernal/processors/cache/GridCacheQueryMultiThreadedSelfTest.java
<ide> import org.apache.ignite.spi.discovery.tcp.ipfinder.*;
<ide> import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*;
<ide> import org.apache.ignite.spi.swapspace.file.*;
<del>import org.gridgain.grid.*;
<ide> import org.gridgain.grid.cache.*;
<ide> import org.gridgain.grid.cache.eviction.lru.*;
<ide> import org.gridgain.grid.cache.query.*;
<ide> * @throws Exception If failed.
<ide> */
<ide> @SuppressWarnings({"TooBroadScope"})
<del> public void _testMultiThreadedSwapUnswapString() throws Exception { // TODO GG-9141
<add> public void testMultiThreadedSwapUnswapString() throws Exception {
<ide> int threadCnt = 150;
<ide> final int keyCnt = 2000;
<ide> final int valCnt = 10000;
<ide> * @throws Exception If failed.
<ide> */
<ide> @SuppressWarnings({"TooBroadScope"})
<del> public void _testMultiThreadedSwapUnswapLong() throws Exception { // TODO GG-9141
<add> public void testMultiThreadedSwapUnswapLong() throws Exception {
<ide> int threadCnt = 150;
<ide> final int keyCnt = 2000;
<ide> final int valCnt = 10000;
<ide> * @throws Exception If failed.
<ide> */
<ide> @SuppressWarnings({"TooBroadScope"})
<del> public void _testMultiThreadedSwapUnswapLongString() throws Exception { // TODO GG-9141
<add> public void testMultiThreadedSwapUnswapLongString() throws Exception {
<ide> int threadCnt = 150;
<ide> final int keyCnt = 2000;
<ide> final int valCnt = 10000;
<ide> * @throws Exception If failed.
<ide> */
<ide> @SuppressWarnings({"TooBroadScope"})
<del> public void _testMultiThreadedSwapUnswapObject() throws Exception { // TODO GG-9141
<add> public void testMultiThreadedSwapUnswapObject() throws Exception {
<ide> int threadCnt = 50;
<ide> final int keyCnt = 4000;
<ide> final int valCnt = 10000;
JavaScript
agpl-3.0
8bc8259d4baa10b6aa44669496cc8a99a97bb7d4
0
htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID,htem/CATMAID,htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID
/* -*- mode: espresso; espresso-indent-level: 2; indent-tabs-mode: nil -*- */ /* vim: set softtabstop=2 shiftwidth=2 tabstop=2 expandtab: */ // TODO check all other TODOS /** Namespace where Node instances are created and edited. */ var SkeletonElements = new function() { var active_skeleton_color = "rgb(255,255,0)"; var inactive_skeleton_color = "rgb(255,0,255)"; var inactive_skeleton_color_above = "rgb(0,0,255)"; var inactive_skeleton_color_below = "rgb(255,0,0)"; var root_node_color = "rgb(255, 0, 0)"; var leaf_node_color = "rgb(128, 0, 0)"; var TYPE_NODE = "treenode"; var TYPE_CONNECTORNODE = "connector"; // For drawing: var NODE_RADIUS = 3; var CATCH_RADIUS = 8; var DISABLED = -1; // ID of the disabled nodes // Two arrays containing all created Node and ConnectorNode, for their reuse. var nodePool = []; var connectorPool = []; var arrowPool = []; // The two corresponding indices in the pool for the next available instance for reuse var nextNodeIndex = 0; var nextConnectorIndex = 0; var nextArrowIndex = 0; this.resetCache = function() { nextNodeIndex = 0; nextConnectorIndex = 0; nextArrowIndex = 0; }; this.clearCache = function() { nodePool.splice(0).forEach(obliterateNode); connectorPool.splice(0).forEach(obliterateConnectorNode); arrowPool.splice(0).forEach(function(arrow) { arrow.obliterate(); }); nextNodeIndex = 0; nextConnectorIndex = 0; nextArrowIndex = 0; }; /** Disable all cached Node instances at or beyond the cutoff index, * preserving up to 100 disabled nodes and 20 disabled connector nodes, * and removing the rest from the cache. */ this.disableBeyond = function(nodeCuttoff, connectorCuttoff) { if (nodeCuttoff < nodePool.length) { // Cut cache array beyond desired cut off point plus 100, and obliterate nodes if (nodePool.length > nodeCuttoff + 100) { nodePool.splice(nodeCuttoff + 100).forEach(obliterateNode); } // Disable nodes from cut off to new ending of node pool array nodePool.slice(nodeCuttoff).forEach(disableNode); } // idem for connectorNode if (connectorCuttoff < connectorPool.length) { if (connectorPool.length > connectorCuttoff + 20) { connectorPool.splice(connectorCuttoff + 20).forEach(obliterateConnectorNode); } connectorPool.slice(connectorCuttoff).forEach(disableConnectorNode); } }; this.disableRemainingArrows = function() { // Cur cache array beyond used arrows plus 50, and obliterate the rest if (nextArrowIndex + 50 < arrowPool.length) { arrowPool.splice(nextArrowIndex + 50).forEach(function(arrow) { arrow.obliterate(); }); } // Disable unused arrows arrowPool.splice(nextArrowIndex).forEach(function(arrow) { arrow.disable(); }); }; /** Surrogate constructor that may reuse an existing, cached Node instance currently not in use. * Appends any newly created instances to the pool. */ this.newNode = function( id, // unique id for the node from the database paper, // the raphael paper this node is drawn to parent, // the parent node, if present within the subset of nodes retrieved for display; otherwise null. 
parent_id, // the id of the parent node, or null if it is root radius, x, // the x coordinate in pixel coordinates y, // y coordinates z, // z coordinates zdiff, // the different from the current slices confidence, skeleton_id, // the id of the skeleton this node is an element of can_edit) // a boolean combining (is_superuser or user owns the node) { var node; if (nextNodeIndex < nodePool.length) { node = nodePool[nextNodeIndex]; reuseNode(node, id, parent, parent_id, radius, x, y, z, zdiff, confidence, skeleton_id, can_edit); } else { node = new this.Node(id, paper, parent, parent_id, radius, x, y, z, zdiff, confidence, skeleton_id, can_edit); nodePool.push(node); } nextNodeIndex += 1; return node; }; /** Constructor for Node instances. */ this.Node = function( id, // unique id for the node from the database paper, // the raphael paper this node is drawn to parent, // the parent node (may be null if the node is not loaded) parent_id, // is null only for the root node radius, // the radius x, // the x coordinate in pixel coordinates y, // y coordinates z, // z coordinates zdiff, // the different from the current slices confidence, skeleton_id, // the id of the skeleton this node is an element of can_edit) { this.id = id; this.type = TYPE_NODE; this.paper = paper; this.parent = parent; this.parent_id = parent_id; this.children = {}; this.numberOfChildren = 0; this.radius = radius; // the radius as stored in the database this.r = NODE_RADIUS; // for drawing this.x = x; this.y = y; this.z = z; this.zdiff = zdiff; this.shouldDisplay = displayTreenode; this.confidence = confidence; this.skeleton_id = skeleton_id; this.can_edit = can_edit; this.isroot = null === parent_id || isNaN(parent_id) || parseInt(parent_id) < 0; this.fillcolor = inactive_skeleton_color; this.c = null; // The Raphael circle for drawing this.mc = null; // The Raphael circle for mouse actions (it's a bit larger) this.line = paper.path(); // The Raphael line element that represents an edge between nodes // NOT needed this.line.toBack(); // The member functions: this.setXY = setXY; this.drawEdges = nodeDrawEdges; this.draw = draw; this.deletenode = nodeDelete; this.setColor = setColor; this.colorFromZDiff = nodeColorFromZDiff; this.createCircle = createCircle; this.addChildNode = function(childNode) { if (!this.children.hasOwnProperty(childNode.id)) { ++ this.numberOfChildren; } // Still set new node object in any case, since // node objects can be reused for different IDs this.children[childNode.id] = childNode; }; }; /** Prepare node for removal from cache. */ var obliterateNode = function(node) { node.id = null; node.parent = null; node.parent_id = null; node.type = null; node.children = null; node.color = null; if (node.c) { node.c.remove(); node.c = null; mouseEventManager.forget(node.mc, TYPE_NODE); node.mc.catmaidNode = null; // break circular reference node.mc.remove(); node.mc = null; } if (node.line) { node.line.remove(); node.line = null; } if (node.number_text) { node.number_text.remove(); node.number_text = null; } node.paper = null; // Note: mouse event handlers are removed by c.remove and mc.remove() }; /** Before reusing a node, clear all the member variables that * are relevant to the skeleton structure. * All numeric variables will be overwritten, * and the c, mc and line will be reused. 
*/ var disableNode = function(node) { node.id = DISABLED; node.parent = null; node.parent_id = DISABLED; node.children = {}; node.numberOfChildren = 0; if (node.c) { node.c.hide(); node.mc.hide(); } if (node.line) { node.line.hide(); } if (node.number_text) { node.number_text.remove(); node.number_text = null; } }; /** Takes an existing Node and sets all the proper members as given, and resets its children. */ var reuseNode = function(node, id, parent, parent_id, radius, x, y, z, zdiff, confidence, skeleton_id, can_edit) { node.id = id; node.parent = parent; node.parent_id = parent_id; node.children = {}; node.numberOfChildren = 0; node.radius = radius; // the radius as stored in the database node.x = x; node.y = y; node.z = z; node.zdiff = zdiff; node.shouldDisplay = displayTreenode; node.confidence = confidence; node.skeleton_id = skeleton_id; node.isroot = null === parent_id || isNaN(parent_id) || parseInt(parent_id) < 0; node.can_edit = can_edit; if (node.c) { if (0 !== zdiff) { node.c.hide(); node.mc.hide(); } else { var newCoords = {cx: x, cy: y}; node.c.attr(newCoords); node.mc.attr(newCoords); } } if (node.line) { node.line.hide(); } if (node.number_text) { node.number_text.remove(); node.number_text = null; } }; /** Trigger the redrawing of the lines with parent, children and connectors. * Here, 'this' is the node, given that it is called in the context of the node only. */ var nodeDrawEdges = function(toChildren) { var ID, children = this.children, child; if (toChildren) { for (ID in children) { if (children.hasOwnProperty(ID)) { child = children[ID]; if (displayBetweenNodes(this, child)) drawLineToParent(children[ID]); } } } if (displayBetweenNodes(this, this.parent)) { drawLineToParent(this); } }; /** Update the local x,y coordinates of the node * Update them for the raphael objects as well. * Does NOT redraw the edges. * Here 'this' refers to the node. */ var setXY = function(xnew, ynew) { this.x = xnew; this.y = ynew; if (this.c) { this.c.attr({ cx: xnew, cy: ynew }); this.mc.attr({ cx: xnew, cy: ynew }); } }; var updateConfidenceText = function (x, y, parentx, parenty, fillColor, confidence, paper, existing) { var result, numberOffset = 12, confidenceFontSize = '20px', xdiff = parentx - x, ydiff = parenty - y, length = Math.sqrt(xdiff*xdiff + ydiff*ydiff), nx = -ydiff / length, ny = xdiff / length, newConfidenceX = (x + parentx) / 2 + nx * numberOffset, newConfidenceY = (y + parenty) / 2 + ny * numberOffset; if (typeof existing === "undefined") { result = paper.text(newConfidenceX, newConfidenceY, ""+confidence); } else { result = existing; } result.attr({x: newConfidenceX, y: newConfidenceY, 'font-size': confidenceFontSize, stroke: 'black', 'stroke-width': 0.25, fill: fillColor, text: ""+confidence}); return result; }; /** Updates the coordinates of the raphael path * that represents the line from the node to the parent. 
*/ var drawLineToParent = function (node) { var parent = node.parent; var lineColor; if (!displayBetweenNodes(node, parent)) { return; } if (parent) { lineColor = node.colorFromZDiff(parent.zdiff, parent.skeleton_id); if (node.line) { node.line.attr({ path: [ ["M", node.x, node.y], ["L", parent.x, parent.y] ], stroke: lineColor, "stroke-width": 2 }); // May be hidden if the node was reused if ("none" === node.line.node.style.display) { node.line.show(); } } if (node.confidence < 5) { if (node.number_text) { updateConfidenceText( node.x, node.y, parent.x, parent.y, lineColor, node.confidence, node.paper, node.number_text); } else { node.number_text = updateConfidenceText( node.x, node.y, parent.x, parent.y, lineColor, node.confidence, node.paper); } node.number_text.toBack(); } else { if (node.number_text) { node.number_text.remove(); node.number_text = null; } } } }; /** Recreate the GUI components, namely the circle and edges. * Here 'this' refers to the node. * This is called only when creating a single node */ var draw = function() { this.createCircle(); this.drawEdges(); }; /** Delete the node from the database and removes it from * the current view and local objects. * Here 'this' refers to the node. */ var nodeDelete = function (wasActiveNode) { var node = this; requestQueue.register(django_url + project.id + '/treenode/delete', "POST", { pid: project.id, treenode_id: node.id }, function (status, text) { if (status !== 200) { alert("The server returned an unexpected status (" + status + ") " + "with error message:\n" + text); } else { if (text && text !== " ") { var e = $.parseJSON(text); if (e.error) { alert(e.error); } else { // activate parent node when deleted if (wasActiveNode) { var ov = node.paper.catmaidSVGOverlay; if (e.parent_id) { ov.selectNode(e.parent_id); } else { // No parent. But if this node was postsynaptic or presynaptic // to a connector, the connector must be selected: var pp = ov.findConnectors(node.id); // Try first connectors for which node is postsynaptic: if (pp[1].length > 0) { ov.selectNode(pp[1][0]); // Then try connectors for which node is presynaptic } else if (pp[0].length > 0) { ov.selectNode(pp[0][0]); } else { ov.activateNode(null); } // Refresh object tree as well, given that the node had no parent and therefore the deletion of its skeleton was triggered ObjectTree.refresh(); } } node.needsync = false; // Redraw everything for now node.paper.catmaidSVGOverlay.updateNodes(); } } } return true; }); }; /** Set the node fill color depending on its distance from the * current slice, whether it's the active node, the root node, or in * an active skeleton. * Here 'this' refers to the node. */ var setColor = function () { if (this.id === SkeletonAnnotations.getActiveNodeId()) { // The active node is always in green: this.fillcolor = SkeletonAnnotations.getActiveNodeColor(); } else if (this.isroot) { // The root node should be colored red unless it's active: this.fillcolor = root_node_color; } else if ((this.type !== TYPE_CONNECTORNODE) && (this.numberOfChildren === 0)) { this.fillcolor = leaf_node_color; } else { // If none of the above applies, just colour according to the z difference. this.fillcolor = this.colorFromZDiff(this.zdiff, this.skeleton_id); } if (this.c) { this.c.attr({ fill: this.fillcolor }); } }; /** Return a color depending upon some conditions, * such as whether the zdiff with the current section is positive, negative, or zero, * and whether the node belongs to the active skeleton. 
*/ var nodeColorFromZDiff = function(zdiff, skeleton_id) { // zdiff is in sections, therefore the current section is at [0, 1) -- notice 0 is inclusive and 1 is exclusive. if (zdiff >= 1) { return inactive_skeleton_color_above; } else if (zdiff < 0) { return inactive_skeleton_color_below; } else if (skeleton_id === SkeletonAnnotations.getActiveSkeletonId() ) { return active_skeleton_color; } return inactive_skeleton_color; }; var displayTreenode = function () { return this.zdiff >= 0 && this.zdiff < 1; }; var displayConnector = function() { /* Change the constant to 1.5 if you want to see the connector (differently coloured) in the next and previous slices too. */ return this.zdiff >= 0 && this.zdiff < 1; }; var displayBetweenNodes = function(node_a, node_b) { return (node_a && node_a.shouldDisplay()) || (node_b && node_b.shouldDisplay()); }; /** Create the Raphael circle elements if and only if the zdiff is zero, that is, if the node lays on the current section. * Here 'this' refers to the node. * */ var createCircle = function() { if (this.shouldDisplay()) { var paper = this.paper; // c and mc may already exist if the node is being reused if (this.c && this.mc) { } else { // create a raphael circle object this.c = paper.circle(this.x, this.y, this.r); // a raphael circle oversized for the mouse logic this.mc = paper.circle(this.x, this.y, CATCH_RADIUS); mouseEventManager.attach(this.mc, this.type); } this.c.attr({ fill: this.fillcolor, stroke: "none", opacity: 1.0 }); this.mc.attr({ fill: "rgb(0, 1, 0)", stroke: "none", opacity: 0 }); if ("none" === this.c.node.style.display) { this.c.show(); this.mc.show(); } this.mc.catmaidNode = this; // for event handlers } }; /** Event handling functions for 'mc' * Realize that: * mc.prev === c * and that, on constructing the mc, we declared: * mc.catmaidNode = this; // 'this' is the node * * Below, the function() is but a namespace that returns the actual nodeAssignEventHandlers function, * which assigns the event handlers to the mc given to it as argument. */ var mouseEventManager = new (function() { /** Variables used for mouse events, which involve a single node at a time. * These are set at mc_start and then used at mc_move. */ var ox = null, oy = null; /** Here 'this' is mc. 
*/ var mc_dblclick = function(e) { if (this.paper.catmaidSVGOverlay.ensureFocused()) { e.stopPropagation(); return; } // Else, do nothing e.stopPropagation(); }; /** * Here 'this' is mc, and treenode is the Node instance */ var mc_click = function(e) { e.stopPropagation(); var node = this.catmaidNode, paper = this.paper, wasActiveNode = false; if (this.paper.catmaidSVGOverlay.ensureFocused()) { return; } if (e.shiftKey) { var atnID = SkeletonAnnotations.getActiveNodeId(); if ((e.ctrlKey || e.metaKey) && e.shiftKey) { if (!mayEdit() || !node.can_edit) { alert("You don't have permission to delete node #" + node.id); return; } // if it is active node, set active node to null if (node.id === atnID) { paper.catmaidSVGOverlay.activateNode(null); wasActiveNode = true; } statusBar.replaceLast("Deleted node #" + node.id); node.deletenode(wasActiveNode); return true; } if (atnID) { var atnType = SkeletonAnnotations.getActiveNodeType(); // connected activated treenode or connectornode // to existing treenode or connectornode // console.log("from source #" + atnID + " to target #" + node.id); if (atnType === TYPE_CONNECTORNODE) { if (!mayEdit()) { alert("You lack permissions to declare node #" + node.id + "as postsynaptic to connector #" + atnID); return; } // careful, atnID is a connector paper.catmaidSVGOverlay.createLink(node.id, atnID, "postsynaptic_to"); // TODO check for error statusBar.replaceLast("Joined node #" + atnID + " to connector #" + node.id); } else if (atnType === TYPE_NODE) { // Joining two skeletons: only possible if one owns both nodes involved // or is a superuser if( node.skeleton_id === SkeletonAnnotations.getActiveSkeletonId() ) { alert('Can not join node with another node of the same skeleton!'); return; } paper.catmaidSVGOverlay.createTreenodeLink(atnID, node.id); // TODO check for error statusBar.replaceLast("Joined node #" + atnID + " to node #" + node.id); } } else { alert("Nothing to join without an active node!"); } } else { // activate this node paper.catmaidSVGOverlay.activateNode(node); // stop propagation of the event } }; /** Here 'this' is mc, and node is the Node instance. */ var mc_move = function(dx, dy, x, y, e) { if (is_middle_click(e)) { // Allow middle-click panning return; } if (!ox || !oy) { // Not properly initialized with mc_start e.stopPropagation(); return; } e.stopPropagation(); if (e.shiftKey) { return; } if (!mayEdit() || !this.catmaidNode.can_edit) { statusBar.replaceLast("You don't have permission to move node #" + this.catmaidNode.id); return; } var node = this.catmaidNode, mc = this, c = this.prev; if( node.id !== SkeletonAnnotations.getActiveNodeId() ) return; node.x = ox + dx; node.y = oy + dy; c.attr({ cx: node.x, cy: node.y }); mc.attr({ cx: node.x, cy: node.y }); node.drawEdges(true); // TODO for connector this is overkill statusBar.replaceLast("Moving node #" + node.id); node.needsync = true; }; /** Here 'this' is mc. */ var mc_up = function(e) { ox = null; oy = null; e.stopPropagation(); var c = this.prev; c.attr({ opacity: 1 }); }; /** Here 'this' is mc. 
*/ var mc_start = function(x, y, e) { if (is_middle_click(e)) { // Allow middle-click panning return; } e.stopPropagation(); var node = this.catmaidNode, c = this.prev; // If not trying to join or remove a node, but merely click on it to drag it or select it: if (!e.shiftKey && !e.ctrlKey && !e.metaKey) { this.paper.catmaidSVGOverlay.activateNode(node); } ox = node.x; oy = node.y; c.attr({ opacity: 0.7 }); }; var mc_mousedown = function(e) { if (is_middle_click(e)) { // Allow middle-click panning return; } e.stopPropagation(); }; var connector_mc_click = function(e) { e.stopPropagation(); var atnID = SkeletonAnnotations.getActiveNodeId(), connectornode = this.catmaidNode, paper = this.paper, wasActiveNode = false; if (this.paper.catmaidSVGOverlay.ensureFocused()) { return; } // return some log information when clicked on the node // this usually refers here to the mc object if (e.shiftKey) { if ((e.ctrlKey || e.metaKey) && e.shiftKey) { if (connectornode.id === atnID) { paper.catmaidSVGOverlay.activateNode(null); wasActiveNode = true; } statusBar.replaceLast("Deleted connector #" + connectornode.id); connectornode.deletenode(wasActiveNode); return true; } if (atnID) { var atnType = SkeletonAnnotations.getActiveNodeType(); // connected activated treenode or connectornode // to existing treenode or connectornode if (atnType === TYPE_CONNECTORNODE) { alert("Can not join two connector nodes!"); } else if (atnType === TYPE_NODE) { paper.catmaidSVGOverlay.createLink(atnID, connectornode.id, "presynaptic_to"); statusBar.replaceLast("Joined node #" + atnID + " with connector #" + connectornode.id); } } else { $('#growl-alert').growlAlert({ autoShow: true, content: 'You need to activate a node before joining it to a connector node!', title: 'BEWARE', position: 'top-right', delayTime: 2500, onComplete: function() { g.remove(); } }); } } else { //console.log("Try to activate node"); // activate this node paper.catmaidSVGOverlay.activateNode(connectornode); } }; this.attach = function(mc, type) { mc.drag(mc_move, mc_start, mc_up); mc.mousedown(mc_mousedown); mc.dblclick(mc_dblclick); if (TYPE_NODE === type) { mc.click(mc_click); } else { // TYPE_CONNECTORNODE mc.click(connector_mc_click); } }; this.forget = function(mc, type) { mc.undrag(); mc.unmousedown(mc_mousedown); mc.undblclick(mc_dblclick); if (TYPE_NODE === type) { mc.unclick(mc_click); } else { // TYPE_CONNECTORNODE mc.unclick(connector_mc_click); } }; })(); // TODO must reuse nodes instead of creating them new, to avoid DOM insertions. // -- well, it can: just leave as members of each the functions that are really different. // Identical functions: setXY, setColor, createCircle, deletenode (but for the php URL), some of the sub-functions of createEventHandlers // Also, there shouldn't be a "needsync" flag. Instead, push the node to an array named "needSyncWithDB". Will avoid looping. // Regarding the nodes map: it is an array of keys over objects stored in a a cache of nodes that are already inserted into the DOM and that can be reused. /** Surrogate constructor for ConnectorNode. * See "newNode" for explanations. 
*/ this.newConnectorNode = function( id, // unique id for the node from the database paper, // the raphael paper this node is drawn to x, // the x coordinate in pixel coordinates y, // y coordinates z, // z coordinates zdiff, // the different from the current slices confidence, can_edit) // a boolean combining (is_superuser or user owns the node) { var connector; if (nextConnectorIndex < connectorPool.length) { connector = connectorPool[nextConnectorIndex]; reuseConnectorNode(connector, id, x, y, z, zdiff, confidence, can_edit); } else { connector = new this.ConnectorNode(id, paper, x, y, z, zdiff, confidence, can_edit); connectorPool.push(connector); } nextConnectorIndex += 1; return connector; }; /** * Constructor for ConnectorNode. */ this.ConnectorNode = function ( id, // unique id for the node from the database paper, // the raphael paper this node is drawn to x, // the x coordinate in pixel coordinates y, // y coordinates z, // z coordinates zdiff, // the different from the current slices confidence, can_edit) // whether the logged in user has permissions to edit this node -- the server will in any case enforce permissions; this is for proper GUI flow { this.id = id; this.type = TYPE_CONNECTORNODE; this.needsync = false; // state variable; whether this node is already synchronized with the database this.x = x; // local screen coordinates relative to the div, in pixel coordinates this.y = y; this.z = z; this.zdiff = zdiff; this.shouldDisplay = displayConnector; this.confidence = confidence; this.can_edit = can_edit; this.paper = paper; this.pregroup = {}; // set of presynaptic treenodes this.postgroup = {}; // set of postsynaptic treenodes this.r = 8; this.c = null; // The Raphael circle for drawing this.mc = null; // The Raphael circle for mouse actions (it's a bit larger) this.preLines = null; // Array of ArrowLine to the presynaptic nodes this.postLines = null; // Array of ArrowLine to the postsynaptic nodes this.fillcolor = null; // Member functions this.setXY = setXY; this.setColor = setColor; this.colorFromZDiff = connectorColorFromZDiff; this.createCircle = createCircle; this.deletenode = connectorDelete; this.draw = draw; this.drawEdges = connectorDrawEdges; }; var obliterateConnectorNode = function(con) { con.id = null; con.fillcolor = null; if (con.c) { con.c.remove(); mouseEventManager.forget(con.mc, TYPE_CONNECTORNODE); con.mc.catmaidNode = null; con.mc.remove(); } con.pregroup = null; con.postgroup = null; con.paper = null; // Note: mouse event handlers are removed by c.remove and mc.remove() removeConnectorArrows(con); // also removes confidence text associated with edges con.preLines = null; con.postLines = null; }; /** * @param c The Node to reuse * @param id * @param r * @param x * @param y * @param z * @param zdiff */ var reuseConnectorNode = function(c, id, x, y, z, zdiff, confidence, can_edit) { c.id = id; c.x = x; c.y = y; c.z = z; c.zdiff = zdiff; c.shouldDisplay = displayConnector; c.confidence = confidence; c.can_edit = can_edit; c.pregroup = {}; c.postgroup = {}; if (c.c) { if (c.shouldDisplay()) { var newCoords = {cx: x, cy: y}; c.c.attr(newCoords); c.mc.attr(newCoords); } else { c.c.hide(); c.mc.hide(); } } c.preLines = null; c.postLines = null; }; /** * * @param c The ConnectorNode instance to disable */ var disableConnectorNode = function(c) { c.id = DISABLED; if (c.c) { c.c.hide(); c.mc.hide(); } removeConnectorArrows(c); }; /** Here 'this' is the connector node. 
*/ var connectorColorFromZDiff = function(zdiff) { // zdiff is in sections, therefore the current section is at [0, 1) -- notice 0 is inclusive and 1 is exclusive. if (zdiff >= 1) { return "rgb(0, 0, 255)"; } else if (zdiff < 0) { return "rgb(255, 0, 0)"; } else { return "rgb(235, 117, 0)"; } }; /** Delete the connector from the database and removes it from * the current view and local objects. * Here 'this' is the connector node. */ var connectorDelete = function () { var connectornode = this; requestQueue.register(django_url + project.id + '/connector/delete', "POST", { pid: project.id, connector_id: connectornode.id }, function (status, text, xml) { if (status !== 200) { alert("The server returned an unexpected status (" + status + ") " + "with error message:\n" + text); } else { if (text && text !== " ") { var e = $.parseJSON(text); if (e.error) { alert(e.error); } else { var ov = connectornode.paper.catmaidSVGOverlay; // If there was a presynaptic node, select it var preIDs = Object.keys(connectornode.pregroup); var postIDs = Object.keys(connectornode.postgroup); if (preIDs.length > 0) { ov.selectNode(preIDs[0]); } else if (postIDs.length > 0) { ov.selectNode(postIDs[0]); } else { ov.activateNode(null); } connectornode.needsync = false; // Refresh all nodes in any case, to reflect the new state of the database ov.updateNodes(); return true; } } } }); }; /** Disables the ArrowLine object and removes entries from the preLines and postLines. */ var removeConnectorArrows = function(c) { var disable; if (c.preLines || c.postLines) disable = function(arrow) { arrow.disable(); }; if (c.preLines) { c.preLines.forEach(disable); c.preLines = null; } if (c.postLines) { c.postLines.forEach(disable); c.postLines = null; } }; /** * Here 'this' is the connector node. */ var connectorDrawEdges = function(redraw) { var i, tnid, treenode, confidence, pregroup = this.pregroup, postgroup = this.postgroup; if (redraw) { removeConnectorArrows(this); } // re-create for (i in pregroup) { if (pregroup.hasOwnProperty(i)) { treenode = pregroup[i].treenode; confidence = pregroup[i].confidence; if (displayBetweenNodes(this, treenode)) { tnid = treenode.id; if (!this.preLines) this.preLines = []; this.preLines.push(connectorCreateArrow(this, tnid, confidence, true)); } } } for (i in postgroup) { if (postgroup.hasOwnProperty(i)) { treenode = postgroup[i].treenode; confidence = postgroup[i].confidence; if (displayBetweenNodes(this, treenode)) { tnid = treenode.id; if (!this.postLines) this.postLines = []; this.postLines.push(connectorCreateArrow(this, tnid, confidence, false)); } } } }; /** Below, a function that acts as a namespace and assigns to connectorCreateArrow the proper function. * (Notice how it is executed at the end of its declaration. 
*/ var connectorCreateArrow = function() { var PRE_COLOR = "rgb(200, 0, 0)"; var POST_COLOR = "rgb(0, 217, 232)"; var pathString = "M0,0,L1,0"; var arrowString = "M0,0,L-5,-5,L-5,5,L0,0"; var mousedown = (function(e) { e.stopPropagation(); if(!(e.shiftKey && (e.ctrlKey || e.metaKey))) { return; } // 'this' is the arrowPath var updateNodes = this.paper.catmaidSVGOverlay.updateNodes; requestQueue.register(django_url + project.id + '/link/delete', "POST", { pid: project.id, connector_id: this.connector_id, treenode_id: this.treenode_id }, function (status, text) { if (status !== 200) { alert("The server returned an unexpected status (" + status + ") " + "with error message:\n" + text); } else { if (text && text !== " ") { var e = $.parseJSON(text); if (e.error) { alert(e.error); } else { updateNodes(); return true; } } } }); }); /** Constructor method for ArrowLine. */ var ArrowLine = function(paper) { var linePath = paper.path(pathString); var arrowPath = paper.path(arrowString); arrowPath.mousedown(mousedown); var confidence_text; this.init = function(x1, y1, x2, y2, confidence, stroke_color, connectorID, treenodeID) { arrowPath.connector_id = connectorID; arrowPath.treenode_id = treenodeID; this.update(x1, y1, x2, y2, confidence); // Adjust linePath.attr({"stroke": stroke_color, "stroke-width": 2}); // Adjust color arrowPath.attr({ "fill": stroke_color, "stroke": stroke_color }); this.show(); }; this.update = function(x1, y1, x2, y2, confidence) { var rloc = 9; var xdiff = (x2 - x1); var ydiff = (y2 - y1); var le = Math.sqrt(xdiff * xdiff + ydiff * ydiff); if( le === 0 ) { le = 0.9 * rloc; } var F = (1 - rloc / le); var x1new = (x1 - x2) * F + x2; var y1new = (y1 - y2) * F + y2; var x2new = (x2 - x1) * F + x1; var y2new = (y2 - y1) * F + y1; var angle = Raphael.angle(x2new, y2new, x1new, y1new); // Reset transform linePath.transform(""); // Translate, rotate and scale var length = Math.sqrt((x2new - x1new) * (x2new - x1new) + (y2new - y1new) * (y2new - y1new)); linePath.transform( "t" + x1new + "," + y1new + "r" + angle + ",0,0" + "s" + length + "," + length + ",0,0"); // Reset transform arrowPath.transform(""); // Translate and then rotate relative to 0,0 (preconcatenates) arrowPath.transform("t" + x2new + "," + y2new + "r" + angle + ",0,0"); if (confidence_text) { if (confidence < 5) { confidence_text.hide(); } else { updateConfidenceText(x1, y1, x2, y2, stroke_color, confidence, paper, confidence_text); confidence_text.show(); } } else if (confidence < 5) { confidence_text = updateConfidenceText(x1, y1, x2, y2, stroke_color, confidence, paper); } }; this.show = function() { // Ensure visible if ("none" === linePath.node.style.display) { linePath.show(); arrowPath.show(); // show may not enough linePath.node.style.display = "block"; arrowPath.node.style.display = "block"; } }; this.disable = function() { arrowPath.connector_id = null; arrowPath.treenode_id = null; linePath.hide(); arrowPath.hide(); if (confidence_text) confidence_text.hide(); }; this.obliterate = function() { arrowPath.connector_id = null; arrowPath.treenode_id = null; arrowPath.unmousedown(mousedown); arrowPath.remove(); arrowPath = null; linePath.remove(); linePath = null; if (confidence_text) { confidence_text.remove(); confidence_text = null; } paper = null; }; }; /** Return the actual connectorCreateArrow function * The 'arrow' argument is optional, and if not undefined or null, will be reused. 
*/ return function(self, treenode_id, confidence, pre, arrow) { if (!arrow) { if (nextArrowIndex < arrowPool.length) { arrow = arrowPool[nextArrowIndex]; } else { arrow = new ArrowLine(self.paper); arrowPool.push(arrow); } nextArrowIndex += 1; } var src, tgt, color; if (pre) { src = self.pregroup[treenode_id].treenode; tgt = self; color = PRE_COLOR; } else { src = self; tgt = self.postgroup[treenode_id].treenode; color = POST_COLOR; } arrow.init(src.x, src.y, tgt.x, tgt.y, confidence, color, self.id, treenode_id); return arrow; }; }(); var is_middle_click = function(e) { return 2 === e.which; }; }();
django/applications/catmaid/static/widgets/overlay_node.js
/* -*- mode: espresso; espresso-indent-level: 2; indent-tabs-mode: nil -*- */ /* vim: set softtabstop=2 shiftwidth=2 tabstop=2 expandtab: */ // TODO check all other TODOS /** Namespace where Node instances are created and edited. */ var SkeletonElements = new function() { var active_skeleton_color = "rgb(255,255,0)"; var inactive_skeleton_color = "rgb(255,0,255)"; var inactive_skeleton_color_above = "rgb(0,0,255)"; var inactive_skeleton_color_below = "rgb(255,0,0)"; var root_node_color = "rgb(255, 0, 0)"; var leaf_node_color = "rgb(128, 0, 0)"; var TYPE_NODE = "treenode"; var TYPE_CONNECTORNODE = "connector"; // For drawing: var NODE_RADIUS = 3; var CATCH_RADIUS = 8; var DISABLED = -1; // ID of the disabled nodes // Two arrays containing all created Node and ConnectorNode, for their reuse. var nodePool = []; var connectorPool = []; var arrowPool = []; // The two corresponding indices in the pool for the next available instance for reuse var nextNodeIndex = 0; var nextConnectorIndex = 0; var nextArrowIndex = 0; this.resetCache = function() { nextNodeIndex = 0; nextConnectorIndex = 0; nextArrowIndex = 0; }; this.clearCache = function() { nodePool.splice(0).forEach(obliterateNode); connectorPool.splice(0).forEach(obliterateConnectorNode); arrowPool.splice(0).forEach(function(arrow) { arrow.obliterate(); }); nextNodeIndex = 0; nextConnectorIndex = 0; nextArrowIndex = 0; }; /** Disable all cached Node instances at or beyond the cutoff index, * preserving up to 100 disabled nodes and 20 disabled connector nodes, * and removing the rest from the cache. */ this.disableBeyond = function(nodeCuttoff, connectorCuttoff) { if (nodeCuttoff < nodePool.length) { // Cut cache array beyond desired cut off point plus 100, and obliterate nodes if (nodePool.length > nodeCuttoff + 100) { nodePool.splice(nodeCuttoff + 100).forEach(obliterateNode); } // Disable nodes from cut off to new ending of node pool array nodePool.slice(nodeCuttoff).forEach(disableNode); } // idem for connectorNode if (connectorCuttoff < connectorPool.length) { if (connectorPool.length > connectorCuttoff + 20) { connectorPool.splice(connectorCuttoff + 20).forEach(obliterateConnectorNode); } connectorPool.slice(connectorCuttoff).forEach(disableConnectorNode); } }; this.disableRemainingArrows = function() { // Cur cache array beyond used arrows plus 50, and obliterate the rest if (nextArrowIndex + 50 < arrowPool.length) { arrowPool.splice(nextArrowIndex + 50).forEach(function(arrow) { arrow.obliterate(); }); } // Disable unused arrows arrowPool.splice(nextArrowIndex).forEach(function(arrow) { arrow.disable(); }); }; /** Surrogate constructor that may reuse an existing, cached Node instance currently not in use. * Appends any newly created instances to the pool. */ this.newNode = function( id, // unique id for the node from the database paper, // the raphael paper this node is drawn to parent, // the parent node, if present within the subset of nodes retrieved for display; otherwise null. 
parent_id, // the id of the parent node, or null if it is root radius, x, // the x coordinate in pixel coordinates y, // y coordinates z, // z coordinates zdiff, // the different from the current slices confidence, skeleton_id, // the id of the skeleton this node is an element of can_edit) // a boolean combining (is_superuser or user owns the node) { var node; if (nextNodeIndex < nodePool.length) { node = nodePool[nextNodeIndex]; reuseNode(node, id, parent, parent_id, radius, x, y, z, zdiff, confidence, skeleton_id, can_edit); } else { node = new this.Node(id, paper, parent, parent_id, radius, x, y, z, zdiff, confidence, skeleton_id, can_edit); nodePool.push(node); } nextNodeIndex += 1; return node; }; /** Constructor for Node instances. */ this.Node = function( id, // unique id for the node from the database paper, // the raphael paper this node is drawn to parent, // the parent node (may be null if the node is not loaded) parent_id, // is null only for the root node radius, // the radius x, // the x coordinate in pixel coordinates y, // y coordinates z, // z coordinates zdiff, // the different from the current slices confidence, skeleton_id, // the id of the skeleton this node is an element of can_edit) { this.id = id; this.type = TYPE_NODE; this.paper = paper; this.parent = parent; this.parent_id = parent_id; this.children = {}; this.numberOfChildren = 0; this.radius = radius; // the radius as stored in the database this.r = NODE_RADIUS; // for drawing this.x = x; this.y = y; this.z = z; this.zdiff = zdiff; this.shouldDisplay = displayTreenode; this.confidence = confidence; this.skeleton_id = skeleton_id; this.can_edit = can_edit; this.isroot = null === parent_id || isNaN(parent_id) || parseInt(parent_id) < 0; this.fillcolor = inactive_skeleton_color; this.c = null; // The Raphael circle for drawing this.mc = null; // The Raphael circle for mouse actions (it's a bit larger) this.line = paper.path(); // The Raphael line element that represents an edge between nodes // NOT needed this.line.toBack(); // The member functions: this.setXY = setXY; this.drawEdges = nodeDrawEdges; this.draw = draw; this.deletenode = nodeDelete; this.setColor = setColor; this.colorFromZDiff = nodeColorFromZDiff; this.createCircle = createCircle; this.addChildNode = function(childNode) { if (!this.children.hasOwnProperty(childNode.id)) { ++ this.numberOfChildren; } // Still set new node object in any case, since // node objects can be reused for different IDs this.children[childNode.id] = childNode; }; }; /** Prepare node for removal from cache. */ var obliterateNode = function(node) { node.id = null; node.parent = null; node.parent_id = null; node.type = null; node.children = null; node.color = null; if (node.c) { node.c.remove(); node.c = null; mouseEventManager.forget(node.mc, TYPE_NODE); node.mc.catmaidNode = null; // break circular reference node.mc.remove(); node.mc = null; } if (node.line) { node.line.remove(); node.line = null; } if (node.number_text) { node.number_text.remove(); node.number_text = null; } node.paper = null; // Note: mouse event handlers are removed by c.remove and mc.remove() }; /** Before reusing a node, clear all the member variables that * are relevant to the skeleton structure. * All numeric variables will be overwritten, * and the c, mc and line will be reused. 
*/ var disableNode = function(node) { node.id = DISABLED; node.parent = null; node.parent_id = DISABLED; node.children = {}; node.numberOfChildren = 0; if (node.c) { node.c.hide(); node.mc.hide(); } if (node.line) { node.line.hide(); } if (node.number_text) { node.number_text.remove(); node.number_text = null; } }; /** Takes an existing Node and sets all the proper members as given, and resets its children. */ var reuseNode = function(node, id, parent, parent_id, radius, x, y, z, zdiff, confidence, skeleton_id, can_edit) { node.id = id; node.parent = parent; node.parent_id = parent_id; node.children = {}; node.numberOfChildren = 0; node.radius = radius; // the radius as stored in the database node.x = x; node.y = y; node.z = z; node.zdiff = zdiff; node.shouldDisplay = displayTreenode; node.confidence = confidence; node.skeleton_id = skeleton_id; node.isroot = null === parent_id || isNaN(parent_id) || parseInt(parent_id) < 0; node.can_edit = can_edit; if (node.c) { if (0 !== zdiff) { node.c.hide(); node.mc.hide(); } else { var newCoords = {cx: x, cy: y}; node.c.attr(newCoords); node.mc.attr(newCoords); } } if (node.line) { node.line.hide(); } if (node.number_text) { node.number_text.remove(); node.number_text = null; } }; /** Trigger the redrawing of the lines with parent, children and connectors. * Here, 'this' is the node, given that it is called in the context of the node only. */ var nodeDrawEdges = function(toChildren) { var ID, children = this.children, child; if (toChildren) { for (ID in children) { if (children.hasOwnProperty(ID)) { child = children[ID]; if (displayBetweenNodes(this, child)) drawLineToParent(children[ID]); } } } if (displayBetweenNodes(this, this.parent)) { drawLineToParent(this); } }; /** Update the local x,y coordinates of the node * Update them for the raphael objects as well. * Does NOT redraw the edges. * Here 'this' refers to the node. */ var setXY = function(xnew, ynew) { this.x = xnew; this.y = ynew; if (this.c) { this.c.attr({ cx: xnew, cy: ynew }); this.mc.attr({ cx: xnew, cy: ynew }); } }; var updateConfidenceText = function (x, y, parentx, parenty, fillColor, confidence, paper, existing) { var result, numberOffset = 12, confidenceFontSize = '20px', xdiff = parentx - x, ydiff = parenty - y, length = Math.sqrt(xdiff*xdiff + ydiff*ydiff), nx = -ydiff / length, ny = xdiff / length, newConfidenceX = (x + parentx) / 2 + nx * numberOffset, newConfidenceY = (y + parenty) / 2 + ny * numberOffset; if (typeof existing === "undefined") { result = paper.text(newConfidenceX, newConfidenceY, ""+confidence); } else { result = existing; } result.attr({x: newConfidenceX, y: newConfidenceY, 'font-size': confidenceFontSize, stroke: 'black', 'stroke-width': 0.25, fill: fillColor, text: ""+confidence}); return result; }; /** Updates the coordinates of the raphael path * that represents the line from the node to the parent. 
*/ var drawLineToParent = function (node) { var parent = node.parent; var lineColor; if (!displayBetweenNodes(node, parent)) { return; } if (parent) { lineColor = node.colorFromZDiff(parent.zdiff, parent.skeleton_id); if (node.line) { node.line.attr({ path: [ ["M", node.x, node.y], ["L", parent.x, parent.y] ], stroke: lineColor, "stroke-width": 2 }); // May be hidden if the node was reused if ("none" === node.line.node.style.display) { node.line.show(); } } if (node.confidence < 5) { if (node.number_text) { updateConfidenceText( node.x, node.y, parent.x, parent.y, lineColor, node.confidence, node.paper, node.number_text); } else { node.number_text = updateConfidenceText( node.x, node.y, parent.x, parent.y, lineColor, node.confidence, node.paper); } node.number_text.toBack(); } else { if (node.number_text) { node.number_text.remove(); node.number_text = null; } } } }; /** Recreate the GUI components, namely the circle and edges. * Here 'this' refers to the node. * This is called only when creating a single node */ var draw = function() { this.createCircle(); this.drawEdges(); }; /** Delete the node from the database and removes it from * the current view and local objects. * Here 'this' refers to the node. */ var nodeDelete = function (wasActiveNode) { var node = this; requestQueue.register(django_url + project.id + '/treenode/delete', "POST", { pid: project.id, treenode_id: node.id }, function (status, text) { if (status !== 200) { alert("The server returned an unexpected status (" + status + ") " + "with error message:\n" + text); } else { if (text && text !== " ") { var e = $.parseJSON(text); if (e.error) { alert(e.error); } else { // activate parent node when deleted if (wasActiveNode) { var ov = node.paper.catmaidSVGOverlay; if (e.parent_id) { ov.selectNode(e.parent_id); } else { // No parent. But if this node was postsynaptic or presynaptic // to a connector, the connector must be selected: var pp = ov.findConnectors(node.id); // Try first connectors for which node is postsynaptic: if (pp[1].length > 0) { ov.selectNode(pp[1][0]); // Then try connectors for which node is presynaptic } else if (pp[0].length > 0) { ov.selectNode(pp[0][0]); } else { ov.activateNode(null); } // Refresh object tree as well, given that the node had no parent and therefore the deletion of its skeleton was triggered ObjectTree.refresh(); } } node.needsync = false; // Redraw everything for now node.paper.catmaidSVGOverlay.updateNodes(); } } } return true; }); }; /** Set the node fill color depending on its distance from the * current slice, whether it's the active node, the root node, or in * an active skeleton. * Here 'this' refers to the node. */ var setColor = function () { if (this.id === SkeletonAnnotations.getActiveNodeId()) { // The active node is always in green: this.fillcolor = SkeletonAnnotations.getActiveNodeColor(); } else if (this.isroot) { // The root node should be colored red unless it's active: this.fillcolor = root_node_color; } else if ((this.type !== TYPE_CONNECTORNODE) && (this.numberOfChildren === 0)) { this.fillcolor = leaf_node_color; } else { // If none of the above applies, just colour according to the z difference. this.fillcolor = this.colorFromZDiff(this.zdiff, this.skeleton_id); } if (this.c) { this.c.attr({ fill: this.fillcolor }); } }; /** Return a color depending upon some conditions, * such as whether the zdiff with the current section is positive, negative, or zero, * and whether the node belongs to the active skeleton. 
*/ var nodeColorFromZDiff = function(zdiff, skeleton_id) { // zdiff is in sections, therefore the current section is at [0, 1) -- notice 0 is inclusive and 1 is exclusive. if (zdiff >= 1) { return inactive_skeleton_color_above; } else if (zdiff < 0) { return inactive_skeleton_color_below; } else if (skeleton_id === SkeletonAnnotations.getActiveSkeletonId() ) { return active_skeleton_color; } return inactive_skeleton_color; }; var displayTreenode = function () { return this.zdiff >= 0 && this.zdiff < 1; }; var displayConnector = function() { /* Change the constant to 1.5 if you want to see the connector (differently coloured) in the next and previous slices too. */ return this.zdiff >= 0 && this.zdiff < 1; }; var displayBetweenNodes = function(node_a, node_b) { return (node_a && node_a.shouldDisplay()) || (node_b && node_b.shouldDisplay()); }; /** Create the Raphael circle elements if and only if the zdiff is zero, that is, if the node lays on the current section. * Here 'this' refers to the node. * */ var createCircle = function() { if (this.shouldDisplay()) { var paper = this.paper; // c and mc may already exist if the node is being reused if (this.c && this.mc) { } else { // create a raphael circle object this.c = paper.circle(this.x, this.y, this.r); // a raphael circle oversized for the mouse logic this.mc = paper.circle(this.x, this.y, CATCH_RADIUS); mouseEventManager.attach(this.mc, this.type); } this.c.attr({ fill: this.fillcolor, stroke: "none", opacity: 1.0 }); this.mc.attr({ fill: "rgb(0, 1, 0)", stroke: "none", opacity: 0 }); if ("none" === this.c.node.style.display) { this.c.show(); this.mc.show(); } this.mc.catmaidNode = this; // for event handlers } }; /** Event handling functions for 'mc' * Realize that: * mc.prev === c * and that, on constructing the mc, we declared: * mc.catmaidNode = this; // 'this' is the node * * Below, the function() is but a namespace that returns the actual nodeAssignEventHandlers function, * which assigns the event handlers to the mc given to it as argument. */ var mouseEventManager = new (function() { /** Variables used for mouse events, which involve a single node at a time. * These are set at mc_start and then used at mc_move. */ var ox = null, oy = null; /** Here 'this' is mc. 
*/ var mc_dblclick = function(e) { if (this.paper.catmaidSVGOverlay.ensureFocused()) { e.stopPropagation(); return; } // Else, do nothing e.stopPropagation(); }; /** * Here 'this' is mc, and treenode is the Node instance */ var mc_click = function(e) { e.stopPropagation(); var node = this.catmaidNode, paper = this.paper, wasActiveNode = false; if (this.paper.catmaidSVGOverlay.ensureFocused()) { return; } if (e.shiftKey) { var atnID = SkeletonAnnotations.getActiveNodeId(); if ((e.ctrlKey || e.metaKey) && e.shiftKey) { if (!mayEdit() || !node.can_edit) { alert("You don't have permission to delete node #" + node.id); return; } // if it is active node, set active node to null if (node.id === atnID) { paper.catmaidSVGOverlay.activateNode(null); wasActiveNode = true; } statusBar.replaceLast("Deleted node #" + node.id); node.deletenode(wasActiveNode); return true; } if (atnID) { var atnType = SkeletonAnnotations.getActiveNodeType(); // connected activated treenode or connectornode // to existing treenode or connectornode // console.log("from source #" + atnID + " to target #" + node.id); if (atnType === TYPE_CONNECTORNODE) { if (!mayEdit()) { alert("You lack permissions to declare node #" + node.id + "as postsynaptic to connector #" + atnID); return; } // careful, atnID is a connector paper.catmaidSVGOverlay.createLink(node.id, atnID, "postsynaptic_to"); // TODO check for error statusBar.replaceLast("Joined node #" + atnID + " to connector #" + node.id); } else if (atnType === TYPE_NODE) { // Joining two skeletons: only possible if one owns both nodes involved // or is a superuser if( node.skeleton_id === SkeletonAnnotations.getActiveSkeletonId() ) { alert('Can not join node with another node of the same skeleton!'); return; } paper.catmaidSVGOverlay.createTreenodeLink(atnID, node.id); // TODO check for error statusBar.replaceLast("Joined node #" + atnID + " to node #" + node.id); } } else { alert("Nothing to join without an active node!"); } } else { // activate this node paper.catmaidSVGOverlay.activateNode(node); // stop propagation of the event } }; /** Here 'this' is mc, and node is the Node instance. */ var mc_move = function(dx, dy, x, y, e) { if (is_middle_click(e)) { // Allow middle-click panning return; } if (!ox || !oy) { // Not properly initialized with mc_start e.stopPropagation(); return; } e.stopPropagation(); if (e.shiftKey) { return; } if (!mayEdit() || !this.catmaidNode.can_edit) { statusBar.replaceLast("You don't have permission to move node #" + this.catmaidNode.id); return; } var node = this.catmaidNode, mc = this, c = this.prev; if( node.id !== SkeletonAnnotations.getActiveNodeId() ) return; node.x = ox + dx; node.y = oy + dy; c.attr({ cx: node.x, cy: node.y }); mc.attr({ cx: node.x, cy: node.y }); node.drawEdges(true); // TODO for connector this is overkill statusBar.replaceLast("Moving node #" + node.id); node.needsync = true; }; /** Here 'this' is mc. */ var mc_up = function(e) { ox = null; oy = null; e.stopPropagation(); var c = this.prev; c.attr({ opacity: 1 }); }; /** Here 'this' is mc. 
*/ var mc_start = function(x, y, e) { if (is_middle_click(e)) { // Allow middle-click panning return; } e.stopPropagation(); var node = this.catmaidNode, c = this.prev; // If not trying to join or remove a node, but merely click on it to drag it or select it: if (!e.shiftKey && !e.ctrlKey && !e.metaKey) { this.paper.catmaidSVGOverlay.activateNode(node); } ox = node.x; oy = node.y; c.attr({ opacity: 0.7 }); }; var mc_mousedown = function(e) { if (is_middle_click(e)) { // Allow middle-click panning return; } e.stopPropagation(); }; var connector_mc_click = function(e) { e.stopPropagation(); var atnID = SkeletonAnnotations.getActiveNodeId(), connectornode = this.catmaidNode, paper = this.paper, wasActiveNode = false; if (this.paper.catmaidSVGOverlay.ensureFocused()) { return; } // return some log information when clicked on the node // this usually refers here to the mc object if (e.shiftKey) { if ((e.ctrlKey || e.metaKey) && e.shiftKey) { if (connectornode.id === atnID) { paper.catmaidSVGOverlay.activateNode(null); wasActiveNode = true; } statusBar.replaceLast("Deleted connector #" + connectornode.id); connectornode.deletenode(wasActiveNode); return true; } if (atnID) { var atnType = SkeletonAnnotations.getActiveNodeType(); // connected activated treenode or connectornode // to existing treenode or connectornode if (atnType === TYPE_CONNECTORNODE) { alert("Can not join two connector nodes!"); } else if (atnType === TYPE_NODE) { paper.catmaidSVGOverlay.createLink(atnID, connectornode.id, "presynaptic_to"); statusBar.replaceLast("Joined node #" + atnID + " with connector #" + connectornode.id); } } else { $('#growl-alert').growlAlert({ autoShow: true, content: 'You need to activate a node before joining it to a connector node!', title: 'BEWARE', position: 'top-right', delayTime: 2500, onComplete: function() { g.remove(); } }); } } else { //console.log("Try to activate node"); // activate this node paper.catmaidSVGOverlay.activateNode(connectornode); } }; this.attach = function(mc, type) { mc.drag(mc_move, mc_start, mc_up); mc.mousedown(mc_mousedown); mc.dblclick(mc_dblclick); if (TYPE_NODE === type) { mc.click(mc_click); } else { // TYPE_CONNECTORNODE mc.click(connector_mc_click); } }; this.forget = function(mc, type) { mc.undrag(); mc.unmousedown(mc_mousedown); mc.undblclick(mc_dblclick); if (TYPE_NODE === type) { mc.unclick(mc_click); } else { // TYPE_CONNECTORNODE mc.unclick(connector_mc_click); } }; })(); // TODO must reuse nodes instead of creating them new, to avoid DOM insertions. // -- well, it can: just leave as members of each the functions that are really different. // Identical functions: setXY, setColor, createCircle, deletenode (but for the php URL), some of the sub-functions of createEventHandlers // Also, there shouldn't be a "needsync" flag. Instead, push the node to an array named "needSyncWithDB". Will avoid looping. // Regarding the nodes map: it is an array of keys over objects stored in a a cache of nodes that are already inserted into the DOM and that can be reused. /** Surrogate constructor for ConnectorNode. * See "newNode" for explanations. 
*/ this.newConnectorNode = function( id, // unique id for the node from the database paper, // the raphael paper this node is drawn to x, // the x coordinate in pixel coordinates y, // y coordinates z, // z coordinates zdiff, // the different from the current slices confidence, can_edit) // a boolean combining (is_superuser or user owns the node) { var connector; if (nextConnectorIndex < connectorPool.length) { connector = connectorPool[nextConnectorIndex]; reuseConnectorNode(connector, id, x, y, z, zdiff, confidence, can_edit); } else { connector = new this.ConnectorNode(id, paper, x, y, z, zdiff, confidence, can_edit); connectorPool.push(connector); } nextConnectorIndex += 1; return connector; }; /** * Constructor for ConnectorNode. */ this.ConnectorNode = function ( id, // unique id for the node from the database paper, // the raphael paper this node is drawn to x, // the x coordinate in pixel coordinates y, // y coordinates z, // z coordinates zdiff, // the different from the current slices confidence, can_edit) // whether the logged in user has permissions to edit this node -- the server will in any case enforce permissions; this is for proper GUI flow { this.id = id; this.type = TYPE_CONNECTORNODE; this.needsync = false; // state variable; whether this node is already synchronized with the database this.x = x; // local screen coordinates relative to the div, in pixel coordinates this.y = y; this.z = z; this.zdiff = zdiff; this.shouldDisplay = displayConnector; this.confidence = confidence; this.can_edit = can_edit; this.paper = paper; this.pregroup = {}; // set of presynaptic treenodes this.postgroup = {}; // set of postsynaptic treenodes this.r = 8; this.c = null; // The Raphael circle for drawing this.mc = null; // The Raphael circle for mouse actions (it's a bit larger) this.preLines = {}; // The Raphael edges to the presynaptic nodes // TODO preLines and postLines should be null, and arrays when full this.postLines = {}; // The Raphael edges to the postsynaptic nodes this.fillcolor = null; // Member functions this.setXY = setXY; this.setColor = setColor; this.colorFromZDiff = connectorColorFromZDiff; this.createCircle = createCircle; this.deletenode = connectorDelete; this.draw = draw; this.drawEdges = connectorDrawEdges; }; var obliterateConnectorNode = function(con) { con.id = null; con.fillcolor = null; if (con.c) { con.c.remove(); mouseEventManager.forget(con.mc, TYPE_CONNECTORNODE); con.mc.catmaidNode = null; con.mc.remove(); } con.pregroup = null; con.postgroup = null; con.paper = null; // Note: mouse event handlers are removed by c.remove and mc.remove() removeConnectorArrows(con.preLines, con.postLines); // also removes confidence text associated with edges con.preLines = null; con.postLines = null; }; /** * @param c The Node to reuse * @param id * @param r * @param x * @param y * @param z * @param zdiff */ var reuseConnectorNode = function(c, id, x, y, z, zdiff, confidence, can_edit) { c.id = id; c.x = x; c.y = y; c.z = z; c.zdiff = zdiff; c.shouldDisplay = displayConnector; c.confidence = confidence; c.can_edit = can_edit; c.pregroup = {}; c.postgroup = {}; if (c.c) { if (c.shouldDisplay()) { var newCoords = {cx: x, cy: y}; c.c.attr(newCoords); c.mc.attr(newCoords); } else { c.c.hide(); c.mc.hide(); } } // preLines and postLines are always removed and then recreated when calling drawEdges }; /** * * @param c The ConnectorNode instance to disable */ var disableConnectorNode = function(c) { c.id = DISABLED; if (c.c) { c.c.hide(); c.mc.hide(); } 
removeConnectorArrows(c.preLines, c.postLines); }; /** Here 'this' is the connector node. */ var connectorColorFromZDiff = function(zdiff) { // zdiff is in sections, therefore the current section is at [0, 1) -- notice 0 is inclusive and 1 is exclusive. if (zdiff >= 1) { return "rgb(0, 0, 255)"; } else if (zdiff < 0) { return "rgb(255, 0, 0)"; } else { return "rgb(235, 117, 0)"; } }; /** Delete the connector from the database and removes it from * the current view and local objects. * Here 'this' is the connector node. */ var connectorDelete = function () { var connectornode = this; requestQueue.register(django_url + project.id + '/connector/delete', "POST", { pid: project.id, connector_id: connectornode.id }, function (status, text, xml) { if (status !== 200) { alert("The server returned an unexpected status (" + status + ") " + "with error message:\n" + text); } else { if (text && text !== " ") { var e = $.parseJSON(text); if (e.error) { alert(e.error); } else { var ov = connectornode.paper.catmaidSVGOverlay; // If there was a presynaptic node, select it var preIDs = Object.keys(connectornode.pregroup); var postIDs = Object.keys(connectornode.postgroup); if (preIDs.length > 0) { ov.selectNode(preIDs[0]); } else if (postIDs.length > 0) { ov.selectNode(postIDs[0]); } else { ov.activateNode(null); } connectornode.needsync = false; // Refresh all nodes in any case, to reflect the new state of the database ov.updateNodes(); return true; } } } }); }; /** Disables the ArrowLine object and removes entries from the preLines and postLines. */ var removeConnectorArrows = function(preLines, postLines) { var i; for (i in preLines) { if (preLines.hasOwnProperty(i)) { preLines[i].disable(); delete preLines[i]; } } for (i in postLines) { if (postLines.hasOwnProperty(i)) { postLines[i].disable(); delete postLines[i]; } } }; /** * Here 'this' is the connector node. */ var connectorDrawEdges = function() { var i, tnid, treenode, confidence, pregroup = this.pregroup, postgroup = this.postgroup; // re-create for (i in pregroup) { if (pregroup.hasOwnProperty(i)) { treenode = pregroup[i].treenode; confidence = pregroup[i].confidence; if (displayBetweenNodes(this, treenode)) { tnid = treenode.id; this.preLines[tnid] = connectorCreateArrow(this, tnid, confidence, true, this.preLines[tnid]); } else if (this.preLines.hasOwnProperty(tnid)) { this.preLines[tnid].disable(); delete this.preLines[tnid]; } } } for (i in postgroup) { if (postgroup.hasOwnProperty(i)) { treenode = postgroup[i].treenode; confidence = postgroup[i].confidence; if (displayBetweenNodes(this, treenode)) { tnid = treenode.id; this.postLines[tnid] = connectorCreateArrow(this, tnid, confidence, false, this.postLines[tnid]); } else if (this.postLines.hasOwnProperty(tnid)) { this.postLines[tnid].disable(); delete this.postLines[tnid]; } } } }; /** Below, a function that acts as a namespace and assigns to connectorCreateArrow the proper function. * (Notice how it is executed at the end of its declaration. 
*/ var connectorCreateArrow = function() { var PRE_COLOR = "rgb(200, 0, 0)"; var POST_COLOR = "rgb(0, 217, 232)"; var pathString = "M0,0,L1,0"; var arrowString = "M0,0,L-5,-5,L-5,5,L0,0"; var mousedown = (function(e) { e.stopPropagation(); if(!(e.shiftKey && (e.ctrlKey || e.metaKey))) { return; } // 'this' is the arrowPath var updateNodes = this.paper.catmaidSVGOverlay.updateNodes; requestQueue.register(django_url + project.id + '/link/delete', "POST", { pid: project.id, connector_id: this.connector_id, treenode_id: this.treenode_id }, function (status, text) { if (status !== 200) { alert("The server returned an unexpected status (" + status + ") " + "with error message:\n" + text); } else { if (text && text !== " ") { var e = $.parseJSON(text); if (e.error) { alert(e.error); } else { updateNodes(); return true; } } } }); }); /** Constructor method for ArrowLine. */ var ArrowLine = function(paper) { var linePath = paper.path(pathString); var arrowPath = paper.path(arrowString); arrowPath.mousedown(mousedown); var confidence_text; this.init = function(x1, y1, x2, y2, confidence, stroke_color, connectorID, treenodeID) { arrowPath.connector_id = connectorID; arrowPath.treenode_id = treenodeID; this.update(x1, y1, x2, y2, confidence); // Adjust linePath.attr({"stroke": stroke_color, "stroke-width": 2}); // Adjust color arrowPath.attr({ "fill": stroke_color, "stroke": stroke_color }); this.show(); }; this.update = function(x1, y1, x2, y2, confidence) { var rloc = 9; var xdiff = (x2 - x1); var ydiff = (y2 - y1); var le = Math.sqrt(xdiff * xdiff + ydiff * ydiff); if( le === 0 ) { le = 0.9 * rloc; } var F = (1 - rloc / le); var x1new = (x1 - x2) * F + x2; var y1new = (y1 - y2) * F + y2; var x2new = (x2 - x1) * F + x1; var y2new = (y2 - y1) * F + y1; var angle = Raphael.angle(x2new, y2new, x1new, y1new); // Reset transform linePath.transform(""); // Translate, rotate and scale var length = Math.sqrt((x2new - x1new) * (x2new - x1new) + (y2new - y1new) * (y2new - y1new)); linePath.transform( "t" + x1new + "," + y1new + "r" + angle + ",0,0" + "s" + length + "," + length + ",0,0"); // Reset transform arrowPath.transform(""); // Translate and then rotate relative to 0,0 (preconcatenates) arrowPath.transform("t" + x2new + "," + y2new + "r" + angle + ",0,0"); if (confidence_text) { if (confidence < 5) { confidence_text.hide(); } else { updateConfidenceText(x1, y1, x2, y2, stroke_color, confidence, paper, confidence_text); confidence_text.show(); } } else if (confidence < 5) { confidence_text = updateConfidenceText(x1, y1, x2, y2, stroke_color, confidence, paper); } }; this.show = function() { // Ensure visible if ("none" === linePath.node.style.display) { linePath.show(); arrowPath.show(); // show may not enough linePath.node.style.display = "block"; arrowPath.node.style.display = "block"; } }; this.disable = function() { arrowPath.connector_id = null; arrowPath.treenode_id = null; linePath.hide(); arrowPath.hide(); if (confidence_text) confidence_text.hide(); }; this.obliterate = function() { arrowPath.connector_id = null; arrowPath.treenode_id = null; arrowPath.unmousedown(mousedown); arrowPath.remove(); arrowPath = null; linePath.remove(); linePath = null; if (confidence_text) { confidence_text.remove(); confidence_text = null; } paper = null; }; }; /** Return the actual connectorCreateArrow function * The 'arrow' argument is optional, and if not undefined or null, will be reused. 
*/ return function(self, treenode_id, confidence, pre, arrow) { if (!arrow) { if (nextArrowIndex < arrowPool.length) { arrow = arrowPool[nextArrowIndex]; } else { arrow = new ArrowLine(self.paper); arrowPool.push(arrow); } nextArrowIndex += 1; } var src, tgt, color; if (pre) { src = self.pregroup[treenode_id].treenode; tgt = self; color = PRE_COLOR; } else { src = self; tgt = self.postgroup[treenode_id].treenode; color = POST_COLOR; } arrow.init(src.x, src.y, tgt.x, tgt.y, confidence, color, self.id, treenode_id); return arrow; }; }(); var is_middle_click = function(e) { return 2 === e.which; }; }();
SVGOverlay: more fixes for connector arrows. The main issue is that connectorDrawEdges serves two purposes: (1) creating the arrows and (2) updating them when a connector is moved (see the readable consolidation after the diff below).
django/applications/catmaid/static/widgets/overlay_node.js
SVGOverlay: more fixes for connector arrows.
<ide><path>django/applications/catmaid/static/widgets/overlay_node.js <ide> this.r = 8; <ide> this.c = null; // The Raphael circle for drawing <ide> this.mc = null; // The Raphael circle for mouse actions (it's a bit larger) <del> this.preLines = {}; // The Raphael edges to the presynaptic nodes <del> // TODO preLines and postLines should be null, and arrays when full <del> this.postLines = {}; // The Raphael edges to the postsynaptic nodes <add> this.preLines = null; // Array of ArrowLine to the presynaptic nodes <add> this.postLines = null; // Array of ArrowLine to the postsynaptic nodes <ide> this.fillcolor = null; <ide> <ide> // Member functions <ide> con.postgroup = null; <ide> con.paper = null; <ide> // Note: mouse event handlers are removed by c.remove and mc.remove() <del> removeConnectorArrows(con.preLines, con.postLines); // also removes confidence text associated with edges <add> removeConnectorArrows(con); // also removes confidence text associated with edges <ide> con.preLines = null; <ide> con.postLines = null; <ide> }; <ide> } <ide> } <ide> <del> // preLines and postLines are always removed and then recreated when calling drawEdges <add> c.preLines = null; <add> c.postLines = null; <ide> }; <ide> <ide> /** <ide> c.c.hide(); <ide> c.mc.hide(); <ide> } <del> removeConnectorArrows(c.preLines, c.postLines); <add> removeConnectorArrows(c); <ide> }; <ide> <ide> /** Here 'this' is the connector node. */ <ide> }; <ide> <ide> /** Disables the ArrowLine object and removes entries from the preLines and postLines. */ <del> var removeConnectorArrows = function(preLines, postLines) { <del> var i; <del> for (i in preLines) { <del> if (preLines.hasOwnProperty(i)) { <del> preLines[i].disable(); <del> delete preLines[i]; <del> } <del> } <del> for (i in postLines) { <del> if (postLines.hasOwnProperty(i)) { <del> postLines[i].disable(); <del> delete postLines[i]; <del> } <add> var removeConnectorArrows = function(c) { <add> var disable; <add> if (c.preLines || c.postLines) disable = function(arrow) { arrow.disable(); }; <add> if (c.preLines) { <add> c.preLines.forEach(disable); <add> c.preLines = null; <add> } <add> if (c.postLines) { <add> c.postLines.forEach(disable); <add> c.postLines = null; <ide> } <ide> }; <ide> <ide> /** <ide> * Here 'this' is the connector node.
<ide> */ <del> var connectorDrawEdges = function() <add> var connectorDrawEdges = function(redraw) <ide> { <ide> var i, <ide> tnid, <ide> confidence, <ide> pregroup = this.pregroup, <ide> postgroup = this.postgroup; <add> <add> if (redraw) { <add> removeConnectorArrows(this); <add> } <ide> <ide> // re-create <ide> for (i in pregroup) { <ide> confidence = pregroup[i].confidence; <ide> if (displayBetweenNodes(this, treenode)) { <ide> tnid = treenode.id; <del> this.preLines[tnid] = connectorCreateArrow(this, tnid, confidence, true, this.preLines[tnid]); <del> } else if (this.preLines.hasOwnProperty(tnid)) { <del> this.preLines[tnid].disable(); <del> delete this.preLines[tnid]; <add> if (!this.preLines) this.preLines = []; <add> this.preLines.push(connectorCreateArrow(this, tnid, confidence, true)); <ide> } <ide> } <ide> } <ide> confidence = postgroup[i].confidence; <ide> if (displayBetweenNodes(this, treenode)) { <ide> tnid = treenode.id; <del> this.postLines[tnid] = connectorCreateArrow(this, tnid, confidence, false, this.postLines[tnid]); <del> } else if (this.postLines.hasOwnProperty(tnid)) { <del> this.postLines[tnid].disable(); <del> delete this.postLines[tnid]; <add> if (!this.postLines) this.postLines = []; <add> this.postLines.push(connectorCreateArrow(this, tnid, confidence, false)); <ide> } <ide> } <ide> }
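Because the diff above is stored as flattened text and is hard to scan, the following is a readable consolidation of the two reworked functions it introduces. This is a sketch assembled from the diff, not a verbatim excerpt of the full file; the helpers it calls (displayBetweenNodes, connectorCreateArrow) are the ones already defined in overlay_node.js, and preLines/postLines are now either null or arrays of ArrowLine.

/* Sketch of the reworked arrow handling, assembled from the diff above. */
var removeConnectorArrows = function (c) {
  var disable;
  // Only build the callback if there is anything to disable.
  if (c.preLines || c.postLines) disable = function (arrow) { arrow.disable(); };
  if (c.preLines) {
    c.preLines.forEach(disable);
    c.preLines = null;
  }
  if (c.postLines) {
    c.postLines.forEach(disable);
    c.postLines = null;
  }
};

/* Here 'this' is the connector node; when redraw is true (e.g. after the
 * connector was moved) the existing arrows are dropped before being recreated. */
var connectorDrawEdges = function (redraw) {
  var i, tnid, treenode, confidence,
      pregroup = this.pregroup,
      postgroup = this.postgroup;
  if (redraw) {
    removeConnectorArrows(this);
  }
  // Recreate arrows to presynaptic treenodes.
  for (i in pregroup) {
    if (pregroup.hasOwnProperty(i)) {
      treenode = pregroup[i].treenode;
      confidence = pregroup[i].confidence;
      if (displayBetweenNodes(this, treenode)) {
        tnid = treenode.id;
        if (!this.preLines) this.preLines = [];
        this.preLines.push(connectorCreateArrow(this, tnid, confidence, true));
      }
    }
  }
  // Recreate arrows to postsynaptic treenodes.
  for (i in postgroup) {
    if (postgroup.hasOwnProperty(i)) {
      treenode = postgroup[i].treenode;
      confidence = postgroup[i].confidence;
      if (displayBetweenNodes(this, treenode)) {
        tnid = treenode.id;
        if (!this.postLines) this.postLines = [];
        this.postLines.push(connectorCreateArrow(this, tnid, confidence, false));
      }
    }
  }
};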
JavaScript
mit
8e36012b1b19d12c6265bdec033063d8d25a4812
0
tracer99/canjs,gsmeets/canjs,gsmeets/canjs,bitovi/canjs,rasjani/canjs,tracer99/canjs,rjgotten/canjs,rasjani/canjs,tracer99/canjs,rjgotten/canjs,rasjani/canjs,bitovi/canjs,rjgotten/canjs,rjgotten/canjs,bitovi/canjs,gsmeets/canjs,bitovi/canjs,rasjani/canjs
steal('can/util/can.js', function (can) { // fragment.js // --------- // _DOM Fragment support._ var fragmentRE = /^\s*<(\w+)[^>]*>/, toString = {}.toString, fragment = function (html, name, doc) { if (name === undefined) { name = fragmentRE.test(html) && RegExp.$1; } if (html && toString.call(html.replace) === "[object Function]") { // Fix "XHTML"-style tags in all browsers html = html.replace(/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi, '<$1></$2>'); } var container = doc.createElement('div'), temp = doc.createElement('div'); // IE's parser will strip any `<tr><td>` tags when `innerHTML` // is called on a `tbody`. To get around this, we construct a // valid table with a `tbody` that has the `innerHTML` we want. // Then the container is the `firstChild` of the `tbody`. // [source](http://www.ericvasilik.com/2006/07/code-karma.html). if (name === 'tbody' || name === 'tfoot' || name === 'thead' || name === 'colgroup') { temp.innerHTML = '<table>' + html + '</table>'; container = temp.firstChild.nodeType === 3 ? temp.lastChild : temp.firstChild; } else if (name === 'col') { temp.innerHTML = '<table><colgroup>' + html + '</colgroup></table>'; container = temp.firstChild.nodeType === 3 ? temp.lastChild : temp.firstChild.firstChild; } else if (name === 'tr') { temp.innerHTML = '<table><tbody>' + html + '</tbody></table>'; container = temp.firstChild.nodeType === 3 ? temp.lastChild : temp.firstChild.firstChild; } else if (name === 'td' || name === 'th') { temp.innerHTML = '<table><tbody><tr>' + html + '</tr></tbody></table>'; container = temp.firstChild.nodeType === 3 ? temp.lastChild : temp.firstChild.firstChild.firstChild; } else if (name === 'option') { temp.innerHTML = '<select>' + html + '</select>'; container = temp.firstChild.nodeType === 3 ? temp.lastChild : temp.firstChild; } else { container.innerHTML = '' + html; } // IE8 barfs if you pass slice a `childNodes` object, so make a copy. var tmp = {}, children = can.childNodes( container ); tmp.length = children.length; for (var i = 0; i < children.length; i++) { tmp[i] = children[i]; } return [].slice.call(tmp); }; can.buildFragment = function (html, doc) { if(html && html.nodeType === 11) { return html; } if(!doc) { doc = document; } else if(doc.length) { doc = doc[0]; } var parts = fragment(html, undefined, doc), frag = (doc || document).createDocumentFragment(); for(var i = 0, length = parts.length; i < length; i++) { frag.appendChild(parts[i]); } return frag; }; // ## Fix build fragment. // In IE8, we can pass a fragment and it removes newlines. // This checks for that and replaces can.buildFragment with something // that if only a single text node is returned, returns a fragment with // a text node that is set to the content. (function(){ var text = "<-\n>", frag = can.buildFragment(text, document); if(text !== frag.firstChild.nodeValue) { var oldBuildFragment = can.buildFragment; can.buildFragment = function(html, nodes){ var res = oldBuildFragment(html, nodes); if(res.childNodes.length === 1 && res.childNodes[0].nodeType === 3) { res.childNodes[0].nodeValue = html; } return res; }; } })(); return can; });
util/fragment.js
steal('can/util/can.js', function (can) { // fragment.js // --------- // _DOM Fragment support._ var fragmentRE = /^\s*<(\w+)[^>]*>/, toString = {}.toString, fragment = function (html, name, doc) { if (name === undefined) { name = fragmentRE.test(html) && RegExp.$1; } if (html && toString.call(html.replace) === "[object Function]") { // Fix "XHTML"-style tags in all browsers html = html.replace(/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi, '<$1></$2>'); } var container = doc.createElement('div'), temp = doc.createElement('div'); // IE's parser will strip any `<tr><td>` tags when `innerHTML` // is called on a `tbody`. To get around this, we construct a // valid table with a `tbody` that has the `innerHTML` we want. // Then the container is the `firstChild` of the `tbody`. // [source](http://www.ericvasilik.com/2006/07/code-karma.html). if (name === 'tbody' || name === 'tfoot' || name === 'thead' || name === 'colgroup') { temp.innerHTML = '<table>' + html + '</table>'; container = temp.firstChild.nodeType === 3 ? temp.lastChild : temp.firstChild; } else if (name === 'col') { temp.innerHTML = '<table><colgroup>' + html + '</colgroup></table>'; container = temp.firstChild.nodeType === 3 ? temp.lastChild : temp.firstChild.firstChild; } else if (name === 'tr') { temp.innerHTML = '<table><tbody>' + html + '</tbody></table>'; container = temp.firstChild.nodeType === 3 ? temp.lastChild : temp.firstChild.firstChild; } else if (name === 'td' || name === 'th') { temp.innerHTML = '<table><tbody><tr>' + html + '</tr></tbody></table>'; container = temp.firstChild.nodeType === 3 ? temp.lastChild : temp.firstChild.firstChild.firstChild; } else if (name === 'option') { temp.innerHTML = '<select>' + html + '</select>'; container = temp.firstChild.nodeType === 3 ? temp.lastChild : temp.firstChild; } else { container.innerHTML = '' + html; } // IE8 barfs if you pass slice a `childNodes` object, so make a copy. var tmp = {}, children = can.childNodes( container ); tmp.length = children.length; for (var i = 0; i < children.length; i++) { tmp[i] = children[i]; } return [].slice.call(tmp); }; can.buildFragment = function (html, doc) { if(html && html.nodeType === 11) { return html; } var parts = fragment(html, undefined, doc || document), frag = (doc || document).createDocumentFragment(); for(var i = 0, length = parts.length; i < length; i++) { frag.appendChild(parts[i]); } return frag; }; // ## Fix build fragment. // In IE8, we can pass a fragment and it removes newlines. // This checks for that and replaces can.buildFragment with something // that if only a single text node is returned, returns a fragment with // a text node that is set to the content. (function(){ var text = "<-\n>", frag = can.buildFragment(text, document); if(text !== frag.firstChild.nodeValue) { var oldBuildFragment = can.buildFragment; can.buildFragment = function(html, nodes){ var res = oldBuildFragment(html, nodes); if(res.childNodes.length === 1 && res.childNodes[0].nodeType === 3) { res.childNodes[0].nodeValue = html; } return res; }; } })(); return can; });
makes compat work with EJS
util/fragment.js
makes compat work with EJS
<ide><path>util/fragment.js <ide> if(html && html.nodeType === 11) { <ide> return html; <ide> } <del> var parts = fragment(html, undefined, doc || document), <add> if(!doc) { <add> doc = document; <add> } else if(doc.length) { <add> doc = doc[0]; <add> } <add> <add> var parts = fragment(html, undefined, doc), <ide> frag = (doc || document).createDocumentFragment(); <ide> for(var i = 0, length = parts.length; i < length; i++) { <ide> frag.appendChild(parts[i]);
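To show why the diff above normalizes the second argument, here is a short usage sketch for can.buildFragment. The three call sites are assumptions made for illustration only (this record does not show how EJS actually passes the document through): the last one passes an array-like wrapper, such as a jQuery collection, which the new doc.length branch unwraps via doc[0].

// Assumed call sites, shown only to demonstrate the `doc` normalization above.
var frag1 = can.buildFragment('<p>hello</p>');             // no doc given: falls back to `document`
var frag2 = can.buildFragment('<p>hello</p>', document);   // a plain document node is used as-is
var frag3 = can.buildFragment('<p>hello</p>', [document]); // array-like wrapper: doc.length is truthy, so doc[0] is used
// In each case the result is a DocumentFragment whose firstChild is the parsed <p> element.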
JavaScript
mit
1b7c1e672aa5b1240fc6eaed263769d0c1bd41ac
0
elexonics/bootstrap-table,zartata/bootstrap-table,djhvscf/bootstrap-table,yenyen/bootstrap-table,moccen/bootstrap-table,yepiaoyang/bootstrap-table,AlfiyaZi/bootstrap-table,hanyuzhou2006/bootstrap-table,veggiemonk/bootstrap-table,opan/bootstrap-table,Projjol/bootstrap-table,olly79/bootstrap-table,zofuthan/bootstrap-table,veggiemonk/bootstrap-table,shaunkleyn/bootstrap-table,MossoIsai/bootstrap-table,voliynyk/bootstrap-table,Projjol/bootstrap-table,AladdinSonni/bootstrap-table,PeterDaveHello/bootstrap-table,rokdevil/bootstrap-table,jhs-s/bootstrap-table,hya1109/bootstrap-table,c1240539157/bootstrap-table,danielsharvey/bootstrap-table,PeterDaveHello/bootstrap-table,facualle/bootstrap-table,y1170328179s/Bootstrap-table,hanyuzhou2006/bootstrap-table,yepiaoyang/bootstrap-table,FellinRoberto/bootstrap-table,robpvn/bootstrap-table,raku/bootstrap-table,u01jmg3/bootstrap-table,messboy/bootstrap-table,mmochetti/bootstrap-table,fhchina/bootstrap-table,hussardbleu/bootstrap-table,shashmehro/bootstrap-table,tmorehouse/bootstrap-table,robpvn/bootstrap-table,wagnerjs/bootstrap-table,CIFullCalendar/bootstrap-table,heatery/bootstrap-table,pengwenbin7/bootstrap-table,horken-w/bootstrap-table,laborautonomo/bootstrap-table,pankaj-dhami/bootstrap-table,tm1000/bootstrap-table,c1240539157/bootstrap-table,tmorehouse/bootstrap-table,tmazur/bootstrap-table,ilovezy/bootstrap-table,bashlakov/bootstrap-table,carlos-granadeno/bootstrap-table,wagnerjs/bootstrap-table,tmazur/bootstrap-table,wenzhixin/bootstrap-table,tm1000/bootstrap-table,hnakamur/bootstrap-table,wenzhixin/bootstrap-table,jCaptain/bootstrap-table,zartata/bootstrap-table,AlfiyaZi/bootstrap-table,hussardbleu/bootstrap-table,facualle/bootstrap-table,mmochetti/bootstrap-table,moccen/bootstrap-table,ilovezy/bootstrap-table,carlos-granadeno/bootstrap-table,staeiou/bootstrap-table,rokdevil/bootstrap-table,messboy/bootstrap-table,admpub/bootstrap-table,venusdharan/bootstrap-table,fredyteheranto/bootstrap-table,shaunkleyn/bootstrap-table,Flounn/bootstrap-table,wenzhixin/bootstrap-table,bashlakov/bootstrap-table,voliynyk/bootstrap-table,laborautonomo/bootstrap-table,heatery/bootstrap-table,jhs-s/bootstrap-table,djhvscf/bootstrap-table,AladdinSonni/bootstrap-table,stevenjlho/bootstrap-table,djhvscf/bootstrap-table,fredyteheranto/bootstrap-table,olly79/bootstrap-table,zofuthan/bootstrap-table,jCaptain/bootstrap-table,elexonics/bootstrap-table,fhchina/bootstrap-table,yenyen/bootstrap-table,pankaj-dhami/bootstrap-table,MossoIsai/bootstrap-table,admpub/bootstrap-table,stevenjlho/bootstrap-table,staeiou/bootstrap-table,Norkart/bootstrap-table,hya1109/bootstrap-table,raku/bootstrap-table,sergiobr/bootstrap-table,opan/bootstrap-table,CIFullCalendar/bootstrap-table,u01jmg3/bootstrap-table,shashmehro/bootstrap-table,pengwenbin7/bootstrap-table,hnakamur/bootstrap-table,djhvscf/bootstrap-table,FellinRoberto/bootstrap-table,venusdharan/bootstrap-table,sergiobr/bootstrap-table,Norkart/bootstrap-table
/** * @author zhixin wen <[email protected]> * version: 1.6.0 * https://github.com/wenzhixin/bootstrap-table/ */ !function ($) { 'use strict'; // TOOLS DEFINITION // ====================== // it only does '%s', and return '' when arguments are undefined var sprintf = function(str) { var args = arguments, flag = true, i = 1; str = str.replace(/%s/g, function () { var arg = args[i++]; if (typeof arg === 'undefined') { flag = false; return ''; } return arg; }); return flag ? str : ''; }; var getPropertyFromOther = function (list, from, to, value) { var result = ''; $.each(list, function (i, item) { if (item[from] === value) { result = item[to]; return false; } return true; }); return result; }; var getFieldIndex = function (columns, field) { var index = -1; $.each(columns, function (i, column) { if (column.field === field) { index = i; return false; } return true; }); return index; }; var getScrollBarWidth = function () { var inner = $('<p/>').addClass('fixed-table-scroll-inner'), outer = $('<div/>').addClass('fixed-table-scroll-outer'), w1, w2; outer.append(inner); $('body').append(outer); w1 = inner[0].offsetWidth; outer.css('overflow', 'scroll'); w2 = inner[0].offsetWidth; if (w1 === w2) { w2 = outer[0].clientWidth; } outer.remove(); return w1 - w2; }; var calculateObjectValue = function (self, name, args, defaultValue) { if (typeof name === 'string') { // support obj.func1.func2 var names = name.split('.'); if (names.length > 1) { name = window; $.each(names, function (i, f) { name = name[f]; }); } else { name = window[name]; } } if (typeof name === 'object') { return name; } if (typeof name === 'function') { return name.apply(self, args); } return defaultValue; }; var escapeHTML = function (text) { if (typeof text === 'string') { return text .replace(/&/g, "&amp;") .replace(/</g, "&lt;") .replace(/>/g, "&gt;") .replace(/"/g, "&quot;") .replace(/'/g, "&#039;"); } return text; }; // BOOTSTRAP TABLE CLASS DEFINITION // ====================== var BootstrapTable = function (el, options) { this.options = options; this.$el = $(el); this.$el_ = this.$el.clone(); this.timeoutId_ = 0; this.init(); }; BootstrapTable.DEFAULTS = { classes: 'table table-hover', height: undefined, undefinedText: '-', sortName: undefined, sortOrder: 'asc', striped: false, columns: [], data: [], method: 'get', url: undefined, cache: true, contentType: 'application/json', dataType: 'json', ajaxOptions: {}, queryParams: function (params) {return params;}, queryParamsType: 'limit', // undefined responseHandler: function (res) {return res;}, pagination: false, sidePagination: 'client', // client or server totalRows: 0, // server side need to set pageNumber: 1, pageSize: 10, pageList: [10, 25, 50, 100], search: false, searchAlign: 'right', selectItemName: 'btSelectItem', showHeader: true, showColumns: false, showPaginationSwitch: false, showRefresh: false, showToggle: false, buttonsAlign: 'right', smartDisplay: true, minimumCountColumns: 1, idField: undefined, cardView: false, trimOnSearch: true, clickToSelect: false, singleSelect: false, toolbar: undefined, toolbarAlign: 'left', checkboxHeader: true, sortable: true, maintainSelected: false, searchTimeOut: 500, iconSize: undefined, iconsPrefix: 'glyphicon', // glyphicon of fa (font awesome) icons: { paginationSwitchDown: 'glyphicon-collapse-down icon-chevron-down', paginationSwitchUp: 'glyphicon-collapse-up icon-chevron-up', refresh: 'glyphicon-refresh icon-refresh', toggle: 'glyphicon-list-alt icon-list-alt', columns: 'glyphicon-th icon-th' }, rowStyle: function (row, index) 
{return {};}, rowAttributes: function (row, index) {return {};}, onAll: function (name, args) {return false;}, onClickRow: function (item, $element) {return false;}, onDblClickRow: function (item, $element) {return false;}, onSort: function (name, order) {return false;}, onCheck: function (row) {return false;}, onUncheck: function (row) {return false;}, onCheckAll: function () {return false;}, onUncheckAll: function () {return false;}, onLoadSuccess: function (data) {return false;}, onLoadError: function (status) {return false;}, onColumnSwitch: function (field, checked) {return false;}, onPageChange: function (number, size) {return false;}, onSearch: function (text) {return false;}, onPreBody: function (data) {return false;}, onPostBody: function () {return false;}, onPostHeader: function() {return false;} }; BootstrapTable.LOCALES = []; BootstrapTable.LOCALES['en-US'] = { formatLoadingMessage: function () { return 'Loading, please wait...'; }, formatRecordsPerPage: function (pageNumber) { return sprintf('%s records per page', pageNumber); }, formatShowingRows: function (pageFrom, pageTo, totalRows) { return sprintf('Showing %s to %s of %s rows', pageFrom, pageTo, totalRows); }, formatSearch: function () { return 'Search'; }, formatNoMatches: function () { return 'No matching records found'; }, formatPaginationSwitch: function () { return 'Hide/Show pagination'; }, formatRefresh: function () { return 'Refresh'; }, formatToggle: function () { return 'Toggle'; }, formatColumns: function () { return 'Columns'; } }; $.extend(BootstrapTable.DEFAULTS, BootstrapTable.LOCALES['en-US']); BootstrapTable.COLUMN_DEFAULTS = { radio: false, checkbox: false, checkboxEnabled: true, field: undefined, title: undefined, 'class': undefined, align: undefined, // left, right, center halign: undefined, // left, right, center valign: undefined, // top, middle, bottom width: undefined, sortable: false, order: 'asc', // asc, desc visible: true, switchable: true, clickToSelect: true, formatter: undefined, events: undefined, sorter: undefined, cellStyle: undefined, searchable: true }; BootstrapTable.EVENTS = { 'all.bs.table': 'onAll', 'click-row.bs.table': 'onClickRow', 'dbl-click-row.bs.table': 'onDblClickRow', 'sort.bs.table': 'onSort', 'check.bs.table': 'onCheck', 'uncheck.bs.table': 'onUncheck', 'check-all.bs.table': 'onCheckAll', 'uncheck-all.bs.table': 'onUncheckAll', 'load-success.bs.table': 'onLoadSuccess', 'load-error.bs.table': 'onLoadError', 'column-switch.bs.table': 'onColumnSwitch', 'page-change.bs.table': 'onPageChange', 'search.bs.table': 'onSearch', 'pre-body.bs.table': 'onPreBody', 'post-body.bs.table': 'onPostBody', 'post-header.bs.table' : 'onPostHeader' }; BootstrapTable.prototype.init = function () { this.initContainer(); this.initTable(); this.initHeader(); this.initData(); this.initToolbar(); this.initPagination(); this.initBody(); this.initServer(); }; BootstrapTable.prototype.initContainer = function () { this.$container = $([ '<div class="bootstrap-table">', '<div class="fixed-table-toolbar"></div>', '<div class="fixed-table-container">', '<div class="fixed-table-header"><table></table></div>', '<div class="fixed-table-body">', '<div class="fixed-table-loading">', this.options.formatLoadingMessage(), '</div>', '</div>', '<div class="fixed-table-pagination"></div>', '</div>', '</div>'].join('')); this.$container.insertAfter(this.$el); this.$container.find('.fixed-table-body').append(this.$el); this.$container.after('<div class="clearfix"></div>'); this.$loading = 
this.$container.find('.fixed-table-loading'); this.$el.addClass(this.options.classes); if (this.options.striped) { this.$el.addClass('table-striped'); } }; BootstrapTable.prototype.initTable = function () { var that = this, columns = [], data = []; this.$header = this.$el.find('thead'); if (!this.$header.length) { this.$header = $('<thead></thead>').appendTo(this.$el); } if (!this.$header.find('tr').length) { this.$header.append('<tr></tr>'); } this.$header.find('th').each(function () { var column = $.extend({}, { title: $(this).html(), 'class': $(this).attr('class') }, $(this).data()); columns.push(column); }); this.options.columns = $.extend([], columns, this.options.columns); $.each(this.options.columns, function (i, column) { that.options.columns[i] = $.extend({}, BootstrapTable.COLUMN_DEFAULTS, {field: i}, column); // when field is undefined, use index instead }); // if options.data is setting, do not process tbody data if (this.options.data.length) { return; } this.$el.find('tbody tr').each(function () { var row = {}; // save tr's id and class row._id = $(this).attr('id'); row._class = $(this).attr('class'); $(this).find('td').each(function (i) { var field = that.options.columns[i].field; row[field] = $(this).html(); // save td's id and class row['_' + field + '_id'] = $(this).attr('id'); row['_' + field + '_class'] = $(this).attr('class'); }); data.push(row); }); this.options.data = data; }; BootstrapTable.prototype.initHeader = function () { var that = this, visibleColumns = [], html = []; this.header = { fields: [], styles: [], classes: [], formatters: [], events: [], sorters: [], cellStyles: [], clickToSelects: [], searchables: [] }; $.each(this.options.columns, function (i, column) { var text = '', halign = '', // header align style align = '', // body align style style = '', class_ = sprintf(' class="%s"', column['class']), order = that.options.sortOrder || column.order, searchable = true; if (!column.visible) { return; } halign = sprintf('text-align: %s; ', column.halign ? column.halign : column.align); align = sprintf('text-align: %s; ', column.align); style = sprintf('vertical-align: %s; ', column.valign); style += sprintf('width: %spx; ', column.checkbox || column.radio ? 36 : column.width); visibleColumns.push(column); that.header.fields.push(column.field); that.header.styles.push(align + style); that.header.classes.push(class_); that.header.formatters.push(column.formatter); that.header.events.push(column.events); that.header.sorters.push(column.sorter); that.header.cellStyles.push(column.cellStyle); that.header.clickToSelects.push(column.clickToSelect); that.header.searchables.push(column.searchable); html.push('<th', column.checkbox || column.radio ? sprintf(' class="bs-checkbox %s"', column['class'] || '') : class_, sprintf(' style="%s"', halign + style), '>'); html.push(sprintf('<div class="th-inner %s">', that.options.sortable && column.sortable ? 
'sortable' : '')); text = column.title; if (that.options.sortName === column.field && that.options.sortable && column.sortable) { text += that.getCaretHtml(); } if (column.checkbox) { if (!that.options.singleSelect && that.options.checkboxHeader) { text = '<input name="btSelectAll" type="checkbox" />'; } that.header.stateField = column.field; } if (column.radio) { text = ''; that.header.stateField = column.field; that.options.singleSelect = true; } html.push(text); html.push('</div>'); html.push('<div class="fht-cell"></div>'); html.push('</th>'); }); this.$header.find('tr').html(html.join('')); this.$header.find('th').each(function (i) { $(this).data(visibleColumns[i]); }); this.$container.off('click', 'th').on('click', 'th', function (event) { if (that.options.sortable && $(this).data().sortable) { that.onSort(event); } }); if (!this.options.showHeader || this.options.cardView) { this.$header.hide(); this.$container.find('.fixed-table-header').hide(); this.$loading.css('top', 0); } else { this.$header.show(); this.$container.find('.fixed-table-header').show(); this.$loading.css('top', '37px'); } this.$selectAll = this.$header.find('[name="btSelectAll"]'); this.$container.off('click', '[name="btSelectAll"]') .on('click', '[name="btSelectAll"]', function () { var checked = $(this).prop('checked'); that[checked ? 'checkAll' : 'uncheckAll'](); }); }; /** * @param data * @param type: append / prepend */ BootstrapTable.prototype.initData = function (data, type) { if (type === 'append') { this.data = this.data.concat(data); } else if (type === 'prepend') { this.data = [].concat(data).concat(this.data); } else { this.data = data || this.options.data; } this.options.data = this.data; if (this.options.sidePagination === 'server') { return; } this.initSort(); }; BootstrapTable.prototype.initSort = function () { var that = this, name = this.options.sortName, order = this.options.sortOrder === 'desc' ? -1 : 1, index = $.inArray(this.options.sortName, this.header.fields); if (index !== -1) { this.data.sort(function (a, b) { var aa = a[name], bb = b[name], value = calculateObjectValue(that.header, that.header.sorters[index], [aa, bb]); if (value !== undefined) { return order * value; } // Convert numerical values form string to float. if ($.isNumeric(aa)) { aa = parseFloat(aa); } if ($.isNumeric(bb)) { bb = parseFloat(bb); } // Fix #161: undefined or null string sort bug. if (aa === undefined || aa === null) { aa = ''; } if (bb === undefined || bb === null) { bb = ''; } if ($.isNumeric(aa) && $.isNumeric(bb)) { if (aa < bb) { return order * -1; } return order; } if (aa === bb) { return 0; } if (aa.localeCompare(bb) === -1) { return order * -1; } return order; }); } }; BootstrapTable.prototype.onSort = function (event) { var $this = $(event.currentTarget), $this_ = this.$header.find('th').eq($this.index()); this.$header.add(this.$header_).find('span.order').remove(); if (this.options.sortName === $this.data('field')) { this.options.sortOrder = this.options.sortOrder === 'asc' ? 'desc' : 'asc'; } else { this.options.sortName = $this.data('field'); this.options.sortOrder = $this.data('order') === 'asc' ? 
'desc' : 'asc'; } this.trigger('sort', this.options.sortName, this.options.sortOrder); $this.add($this_).data('order', this.options.sortOrder) .find('.th-inner').append(this.getCaretHtml()); if (this.options.sidePagination === 'server') { this.initServer(); return; } this.initSort(); this.initBody(); }; BootstrapTable.prototype.initToolbar = function () { var that = this, html = [], timeoutId = 0, $keepOpen, $search, switchableCount = 0; this.$toolbar = this.$container.find('.fixed-table-toolbar').html(''); if (typeof this.options.toolbar === 'string') { $(sprintf('<div class="bars pull-%s"></div>', this.options.toolbarAlign)) .appendTo(this.$toolbar) .append($(this.options.toolbar)); } // showColumns, showToggle, showRefresh html = [sprintf('<div class="columns columns-%s btn-group pull-%s">', this.options.buttonsAlign, this.options.buttonsAlign)]; if (typeof this.options.icons === 'string') { this.options.icons = calculateObjectValue(null, this.options.icons); } if (this.options.showPaginationSwitch) { html.push(sprintf('<button class="btn btn-default" type="button" name="paginationSwitch" title="%s">', this.options.formatPaginationSwitch()), sprintf('<i class="%s %s"></i>', this.options.iconsPrefix, this.options.icons.paginationSwitchDown), '</button>'); } if (this.options.showRefresh) { html.push(sprintf('<button class="btn btn-default' + (this.options.iconSize == undefined ? '' : ' btn-' + this.options.iconSize) + '" type="button" name="refresh" title="%s">', this.options.formatRefresh()), sprintf('<i class="%s %s"></i>', this.options.iconsPrefix, this.options.icons.refresh), '</button>'); } if (this.options.showToggle) { html.push(sprintf('<button class="btn btn-default' + (this.options.iconSize == undefined ? '' : ' btn-' + this.options.iconSize) + '" type="button" name="toggle" title="%s">', this.options.formatToggle()), sprintf('<i class="%s %s"></i>', this.options.iconsPrefix, this.options.icons.toggle), '</button>'); } if (this.options.showColumns) { html.push(sprintf('<div class="keep-open btn-group" title="%s">', this.options.formatColumns()), '<button type="button" class="btn btn-default' + (this.options.iconSize == undefined ? '' : ' btn-' + this.options.iconSize) + ' dropdown-toggle" data-toggle="dropdown">', sprintf('<i class="%s %s"></i>', this.options.iconsPrefix, this.options.icons.columns), ' <span class="caret"></span>', '</button>', '<ul class="dropdown-menu" role="menu">'); $.each(this.options.columns, function (i, column) { if (column.radio || column.checkbox) { return; } var checked = column.visible ? 
' checked="checked"' : ''; if (column.switchable) { html.push(sprintf('<li>' + '<label><input type="checkbox" data-field="%s" value="%s"%s> %s</label>' + '</li>', column.field, i, checked, column.title)); switchableCount++; } }); html.push('</ul>', '</div>'); } html.push('</div>'); // Fix #188: this.showToolbar is for extensions if (this.showToolbar || html.length > 2) { this.$toolbar.append(html.join('')); } if (this.options.showPaginationSwitch) { this.$toolbar.find('button[name="paginationSwitch"]') .off('click').on('click', $.proxy(this.togglePagination, this)); } if (this.options.showRefresh) { this.$toolbar.find('button[name="refresh"]') .off('click').on('click', $.proxy(this.refresh, this)); } if (this.options.showToggle) { this.$toolbar.find('button[name="toggle"]') .off('click').on('click', function () { that.options.cardView = !that.options.cardView; that.initHeader(); that.initBody(); }); } if (this.options.showColumns) { $keepOpen = this.$toolbar.find('.keep-open'); if (switchableCount <= this.options.minimumCountColumns) { $keepOpen.find('input').prop('disabled', true); } $keepOpen.find('li').off('click').on('click', function (event) { event.stopImmediatePropagation(); }); $keepOpen.find('input').off('click').on('click', function () { var $this = $(this); that.toggleColumn($this.val(), $this.prop('checked'), false); that.trigger('column-switch', $(this).data('field'), $this.prop('checked')); }); } if (this.options.search) { html = []; html.push( '<div class="pull-' + this.options.searchAlign + ' search">', sprintf('<input class="form-control' + (this.options.iconSize == undefined ? '' : ' input-' + this.options.iconSize) + '" type="text" placeholder="%s">', this.options.formatSearch()), '</div>'); this.$toolbar.append(html.join('')); $search = this.$toolbar.find('.search input'); $search.off('keyup').on('keyup', function (event) { clearTimeout(timeoutId); // doesn't matter if it's 0 timeoutId = setTimeout(function () { that.onSearch(event); }, that.options.searchTimeOut); }); } }; BootstrapTable.prototype.onSearch = function (event) { var text = $.trim($(event.currentTarget).val()); // trim search input if(this.options.trimOnSearch) { $(event.currentTarget).val(text); } if (text === this.searchText) { return; } this.searchText = text; this.options.pageNumber = 1; this.initSearch(); this.updatePagination(); this.trigger('search', text); }; BootstrapTable.prototype.initSearch = function () { var that = this; if (this.options.sidePagination !== 'server') { var s = this.searchText && this.searchText.toLowerCase(); var f = $.isEmptyObject(this.filterColumns) ? null: this.filterColumns; // Check filter this.data = f ? $.grep(this.options.data, function (item, i) { for (var key in f) { if (item[key] !== f[key]) { return false; } } return true; }) : this.options.data; this.data = s ? $.grep(this.data, function (item, i) { for (var key in item) { key = $.isNumeric(key) ?
parseInt(key, 10) : key; var value = item[key]; // Fix #142: search uses formatted data value = calculateObjectValue(that.header, that.header.formatters[$.inArray(key, that.header.fields)], [value, item, i], value); var index = $.inArray(key, that.header.fields); if (index !== -1 && that.header.searchables[index] && (typeof value === 'string' || typeof value === 'number') && (value + '').toLowerCase().indexOf(s) !== -1) { return true; } } return false; }) : this.data; } }; BootstrapTable.prototype.initPagination = function () { this.$pagination = this.$container.find('.fixed-table-pagination'); if (!this.options.pagination) { this.$pagination.hide(); return; } else { this.$pagination.show(); } var that = this, html = [], i, from, to, $pageList, $first, $pre, $next, $last, $number, data = this.getData(); if (this.options.sidePagination !== 'server') { this.options.totalRows = data.length; } this.totalPages = 0; if (this.options.totalRows) { this.totalPages = ~~((this.options.totalRows - 1) / this.options.pageSize) + 1; this.options.totalPages = this.totalPages; } if (this.totalPages > 0 && this.options.pageNumber > this.totalPages) { this.options.pageNumber = this.totalPages; } this.pageFrom = (this.options.pageNumber - 1) * this.options.pageSize + 1; this.pageTo = this.options.pageNumber * this.options.pageSize; if (this.pageTo > this.options.totalRows) { this.pageTo = this.options.totalRows; } html.push( '<div class="pull-left pagination-detail">', '<span class="pagination-info">', this.options.formatShowingRows(this.pageFrom, this.pageTo, this.options.totalRows), '</span>'); html.push('<span class="page-list">'); var pageNumber = [ '<span class="btn-group dropup">', '<button type="button" class="btn btn-default '+ (this.options.iconSize == undefined ? '' : ' btn-' + this.options.iconSize)+ ' dropdown-toggle" data-toggle="dropdown">', '<span class="page-size">', this.options.pageSize, '</span>', ' <span class="caret"></span>', '</button>', '<ul class="dropdown-menu" role="menu">'], pageList = this.options.pageList; if (typeof this.options.pageList === 'string') { var list = this.options.pageList.replace('[', '').replace(']', '').replace(/ /g, '').split(','); pageList = []; $.each(list, function (i, value) { pageList.push(+value); }); } $.each(pageList, function (i, page) { if (!that.options.smartDisplay || i === 0 || pageList[i-1] <= that.options.totalRows) { var active = page === that.options.pageSize ? ' class="active"' : ''; pageNumber.push(sprintf('<li%s><a href="javascript:void(0)">%s</a></li>', active, page)); } }); pageNumber.push('</ul></span>'); html.push(this.options.formatRecordsPerPage(pageNumber.join(''))); html.push('</span>'); html.push('</div>', '<div class="pull-right pagination">', '<ul class="pagination' + (this.options.iconSize == undefined ? '' : ' pagination-' + this.options.iconSize) + '">', '<li class="page-first"><a href="javascript:void(0)">&lt;&lt;</a></li>', '<li class="page-pre"><a href="javascript:void(0)">&lt;</a></li>'); if (this.totalPages < 5) { from = 1; to = this.totalPages; } else { from = this.options.pageNumber - 2; to = from + 4; if (from < 1) { from = 1; to = 5; } if (to > this.totalPages) { to = this.totalPages; from = to - 4; } } for (i = from; i <= to; i++) { html.push('<li class="page-number' + (i === this.options.pageNumber ?
' active' : '') + '">', '<a href="javascript:void(0)">', i ,'</a>', '</li>'); } html.push( '<li class="page-next"><a href="javascript:void(0)">&gt;</a></li>', '<li class="page-last"><a href="javascript:void(0)">&gt;&gt;</a></li>', '</ul>', '</div>'); this.$pagination.html(html.join('')); $pageList = this.$pagination.find('.page-list a'); $first = this.$pagination.find('.page-first'); $pre = this.$pagination.find('.page-pre'); $next = this.$pagination.find('.page-next'); $last = this.$pagination.find('.page-last'); $number = this.$pagination.find('.page-number'); if (this.options.pageNumber <= 1) { $first.addClass('disabled'); $pre.addClass('disabled'); } if (this.options.pageNumber >= this.totalPages) { $next.addClass('disabled'); $last.addClass('disabled'); } if (this.options.smartDisplay) { if (this.totalPages <= 1) { this.$pagination.find('div.pagination').hide(); } if (this.options.pageList.length < 2 || this.options.totalRows <= this.options.pageList[0]) { this.$pagination.find('span.page-list').hide(); } // when data is empty, hide the pagination this.$pagination[this.getData().length ? 'show' : 'hide'](); } $pageList.off('click').on('click', $.proxy(this.onPageListChange, this)); $first.off('click').on('click', $.proxy(this.onPageFirst, this)); $pre.off('click').on('click', $.proxy(this.onPagePre, this)); $next.off('click').on('click', $.proxy(this.onPageNext, this)); $last.off('click').on('click', $.proxy(this.onPageLast, this)); $number.off('click').on('click', $.proxy(this.onPageNumber, this)); }; BootstrapTable.prototype.updatePagination = function (event) { // Fix #171: IE disabled button can be clicked bug. if (event && $(event.currentTarget).hasClass('disabled')) { return; } if (!this.options.maintainSelected) { this.resetRows(); } this.initPagination(); if (this.options.sidePagination === 'server') { this.initServer(); } else { this.initBody(); } this.trigger('page-change', this.options.pageNumber, this.options.pageSize); }; BootstrapTable.prototype.onPageListChange = function (event) { var $this = $(event.currentTarget); $this.parent().addClass('active').siblings().removeClass('active'); this.options.pageSize = +$this.text(); this.$toolbar.find('.page-size').text(this.options.pageSize); this.updatePagination(event); }; BootstrapTable.prototype.onPageFirst = function (event) { this.options.pageNumber = 1; this.updatePagination(event); }; BootstrapTable.prototype.onPagePre = function (event) { this.options.pageNumber--; this.updatePagination(event); }; BootstrapTable.prototype.onPageNext = function (event) { this.options.pageNumber++; this.updatePagination(event); }; BootstrapTable.prototype.onPageLast = function (event) { this.options.pageNumber = this.totalPages; this.updatePagination(event); }; BootstrapTable.prototype.onPageNumber = function (event) { if (this.options.pageNumber === +$(event.currentTarget).text()) { return; } this.options.pageNumber = +$(event.currentTarget).text(); this.updatePagination(event); }; BootstrapTable.prototype.initBody = function (fixedScroll) { var that = this, html = [], data = this.getData(); this.trigger('pre-body', data); this.$body = this.$el.find('tbody'); if (!this.$body.length) { this.$body = $('<tbody></tbody>').appendTo(this.$el); } //Fix #389 Bootstrap-table-flatJSON is not working if (!this.options.pagination || this.options.sidePagination === 'server') { this.pageFrom = 1; this.pageTo = data.length; } for (var i = this.pageFrom - 1; i < this.pageTo; i++) { var item = data[i], style = {}, csses = [], attributes = {}, 
htmlAttributes = []; style = calculateObjectValue(this.options, this.options.rowStyle, [item, i], style); if (style && style.css) { for (var key in style.css) { csses.push(key + ': ' + style.css[key]); } } attributes = calculateObjectValue(this.options, this.options.rowAttributes, [item, i], attributes); if (attributes) { for (var key in attributes) { htmlAttributes.push(sprintf('%s="%s"', key, escapeHTML(attributes[key]))); } } html.push('<tr', sprintf(' %s', htmlAttributes.join(' ')), sprintf(' id="%s"', $.isArray(item) ? undefined : item._id), sprintf(' class="%s"', style.classes || ($.isArray(item) ? undefined : item._class)), sprintf(' data-index="%s"', i), '>' ); if (this.options.cardView) { html.push(sprintf('<td colspan="%s">', this.header.fields.length)); } $.each(this.header.fields, function (j, field) { var text = '', value = item[field], type = '', cellStyle = {}, id_ = '', class_ = that.header.classes[j], column = that.options.columns[getFieldIndex(that.options.columns, field)]; style = sprintf('style="%s"', csses.concat(that.header.styles[j]).join('; ')); value = calculateObjectValue(that.header, that.header.formatters[j], [value, item, i], value); // handle td's id and class if (item['_' + field + '_id']) { id_ = sprintf(' id="%s"', item['_' + field + '_id']); } if (item['_' + field + '_class']) { class_ = sprintf(' class="%s"', item['_' + field + '_class']); } cellStyle = calculateObjectValue(that.header, that.header.cellStyles[j], [value, item, i], cellStyle); if (cellStyle.classes) { class_ = sprintf(' class="%s"', cellStyle.classes); } if (cellStyle.css) { var csses_ = []; for (var key in cellStyle.css) { csses_.push(key + ': ' + cellStyle.css[key]); } style = sprintf('style="%s"', csses_.concat(that.header.styles[j]).join('; ')); } if (column.checkbox || column.radio) { type = column.checkbox ? 'checkbox' : type; type = column.radio ? 'radio' : type; text = [that.options.cardView ? '<div class="card-view">' : '<td class="bs-checkbox">', '<input' + sprintf(' data-index="%s"', i) + sprintf(' name="%s"', that.options.selectItemName) + sprintf(' type="%s"', type) + sprintf(' value="%s"', item[that.options.idField]) + sprintf(' checked="%s"', value === true || (value && value.checked) ? 'checked' : undefined) + sprintf(' disabled="%s"', !column.checkboxEnabled || (value && value.disabled) ? 'disabled' : undefined) + ' />', that.options.cardView ? '</div>' : '</td>'].join(''); } else { value = typeof value === 'undefined' || value === null ? that.options.undefinedText : value; text = that.options.cardView ? ['<div class="card-view">', that.options.showHeader ? sprintf('<span class="title" %s>%s</span>', style, getPropertyFromOther(that.options.columns, 'field', 'title', field)) : '', sprintf('<span class="value">%s</span>', value), '</div>'].join('') : [sprintf('<td%s %s %s>', id_, class_, style), value, '</td>'].join(''); // Hide empty data on Card view when smartDisplay is set to true. 
if (that.options.cardView && that.options.smartDisplay && value === '') { text = ''; } } html.push(text); }); if (this.options.cardView) { html.push('</td>'); } html.push('</tr>'); } // show no records if (!html.length) { html.push('<tr class="no-records-found">', sprintf('<td colspan="%s">%s</td>', this.header.fields.length, this.options.formatNoMatches()), '</tr>'); } this.$body.html(html.join('')); if (!fixedScroll) { this.scrollTo(0); } // click to select by column this.$body.find('> tr > td').off('click').on('click', function () { var $tr = $(this).parent(); that.trigger('click-row', that.data[$tr.data('index')], $tr); // if click to select - then trigger the checkbox/radio click if (that.options.clickToSelect) { if (that.header.clickToSelects[$tr.children().index($(this))]) { $tr.find(sprintf('[name="%s"]', that.options.selectItemName))[0].click(); // #144: .trigger('click') bug } } }); this.$body.find('tr').off('dblclick').on('dblclick', function () { that.trigger('dbl-click-row', that.data[$(this).data('index')], $(this)); }); this.$selectItem = this.$body.find(sprintf('[name="%s"]', this.options.selectItemName)); this.$selectItem.off('click').on('click', function (event) { event.stopImmediatePropagation(); var checked = $(this).prop('checked'), row = that.data[$(this).data('index')]; row[that.header.stateField] = checked; that.trigger(checked ? 'check' : 'uncheck', row); if (that.options.singleSelect) { that.$selectItem.not(this).each(function () { that.data[$(this).data('index')][that.header.stateField] = false; }); that.$selectItem.filter(':checked').not(this).prop('checked', false); } that.updateSelected(); }); $.each(this.header.events, function (i, events) { if (!events) { return; } // fix bug, if events is defined with namespace if (typeof events === 'string') { events = calculateObjectValue(null, events); } for (var key in events) { that.$body.find('tr').each(function () { var $tr = $(this), $td = $tr.find(that.options.cardView ? '.card-view' : 'td').eq(i), index = key.indexOf(' '), name = key.substring(0, index), el = key.substring(index + 1), func = events[key]; $td.find(el).off(name).on(name, function (e) { var index = $tr.data('index'), row = that.data[index], value = row[that.header.fields[i]]; func.apply(this, [e, value, row, index]); }); }); } }); this.updateSelected(); this.resetView(); this.trigger('post-body'); }; BootstrapTable.prototype.initServer = function (silent, query) { var that = this, data = {}, params = { pageSize: this.options.pageSize, pageNumber: this.options.pageNumber, searchText: this.searchText, sortName: this.options.sortName, sortOrder: this.options.sortOrder }; if (!this.options.url) { return; } if (this.options.queryParamsType === 'limit') { params = { search: params.searchText, sort: params.sortName, order: params.sortOrder }; if (this.options.pagination) { params.limit = this.options.pageSize; params.offset = this.options.pageSize * (this.options.pageNumber - 1); } } data = calculateObjectValue(this.options, this.options.queryParams, [params], data); $.extend(data, query || {}); // false to stop request if (data === false) { return; } if (!silent) { this.$loading.show(); } $.ajax($.extend({}, calculateObjectValue(null, this.options.ajaxOptions), { type: this.options.method, url: this.options.url, data: this.options.contentType === 'application/json' && this.options.method === 'post' ? 
JSON.stringify(data): data, cache: this.options.cache, contentType: this.options.contentType, dataType: this.options.dataType, success: function (res) { res = calculateObjectValue(that.options, that.options.responseHandler, [res], res); that.load(res); that.trigger('load-success', res); }, error: function (res) { that.trigger('load-error', res.status); }, complete: function () { if (!silent) { that.$loading.hide(); } } })); }; BootstrapTable.prototype.getCaretHtml = function () { return ['<span class="order' + (this.options.sortOrder === 'desc' ? '' : ' dropup') + '">', '<span class="caret" style="margin: 10px 5px;"></span>', '</span>'].join(''); }; BootstrapTable.prototype.updateSelected = function () { var checkAll = this.$selectItem.filter(':enabled').length === this.$selectItem.filter(':enabled').filter(':checked').length; this.$selectAll.add(this.$selectAll_).prop('checked', checkAll); this.$selectItem.each(function () { $(this).parents('tr')[$(this).prop('checked') ? 'addClass' : 'removeClass']('selected'); }); }; BootstrapTable.prototype.updateRows = function (checked) { var that = this; this.$selectItem.each(function () { that.data[$(this).data('index')][that.header.stateField] = checked; }); }; BootstrapTable.prototype.resetRows = function () { var that = this; $.each(this.data, function (i, row) { that.$selectAll.prop('checked', false); that.$selectItem.prop('checked', false); row[that.header.stateField] = false; }); }; BootstrapTable.prototype.trigger = function (name) { var args = Array.prototype.slice.call(arguments, 1); name += '.bs.table'; this.options[BootstrapTable.EVENTS[name]].apply(this.options, args); this.$el.trigger($.Event(name), args); this.options.onAll(name, args); this.$el.trigger($.Event('all.bs.table'), [name, args]); }; BootstrapTable.prototype.resetHeader = function () { var that = this, $fixedHeader = this.$container.find('.fixed-table-header'), $fixedBody = this.$container.find('.fixed-table-body'), scrollWidth = this.$el.width() > $fixedBody.width() ? getScrollBarWidth() : 0; // fix #61: the hidden table reset header bug. 
if (this.$el.is(':hidden')) { clearTimeout(this.timeoutId_); // doesn't matter if it's 0 this.timeoutId_ = setTimeout($.proxy(this.resetHeader, this), 100); // 100ms return; } this.$header_ = this.$header.clone(true, true); this.$selectAll_ = this.$header_.find('[name="btSelectAll"]'); // fix bug: get $el.css('width') error sometime (height = 500) setTimeout(function () { $fixedHeader.css({ 'height': '37px', 'border-bottom': '1px solid #dddddd', 'margin-right': scrollWidth }).find('table').css('width', that.$el.css('width')) .html('').attr('class', that.$el.attr('class')) .append(that.$header_); // fix bug: $.data() is not working as expected after $.append() that.$header.find('th').each(function (i) { that.$header_.find('th').eq(i).data($(this).data()); }); that.$body.find('tr:first-child:not(.no-records-found) > *').each(function(i) { that.$header_.find('div.fht-cell').eq(i).width($(this).innerWidth()); }); that.$el.css('margin-top', -that.$header.height()); // horizontal scroll event $fixedBody.off('scroll').on('scroll', function () { $fixedHeader.scrollLeft($(this).scrollLeft()); }); that.trigger('post-header'); }); }; BootstrapTable.prototype.toggleColumn = function (index, checked, needUpdate) { if (index === -1) { return; } this.options.columns[index].visible = checked; this.initHeader(); this.initSearch(); this.initPagination(); this.initBody(); if (this.options.showColumns) { var $items = this.$toolbar.find('.keep-open input').prop('disabled', false); if (needUpdate) { $items.filter(sprintf('[value="%s"]', index)).prop('checked', checked); } if ($items.filter(':checked').length <= this.options.minimumCountColumns) { $items.filter(':checked').prop('disabled', true); } } }; // PUBLIC FUNCTION DEFINITION // ======================= BootstrapTable.prototype.resetView = function (params) { var that = this, header = this.header; if (params && params.height) { this.options.height = params.height; } this.$selectAll.prop('checked', this.$selectItem.length > 0 && this.$selectItem.length === this.$selectItem.filter(':checked').length); if (this.options.height) { var toolbarHeight = +this.$toolbar.children().outerHeight(true), paginationHeight = +this.$pagination.children().outerHeight(true), height = this.options.height - toolbarHeight - paginationHeight; this.$container.find('.fixed-table-container').css('height', height + 'px'); } if (this.options.cardView) { // remove the element css that.$el.css('margin-top', '0'); that.$container.find('.fixed-table-container').css('padding-bottom', '0'); return; } if (this.options.showHeader && this.options.height) { this.resetHeader(); } else { this.trigger('post-header'); } if (this.options.height && this.options.showHeader) { this.$container.find('.fixed-table-container').css('padding-bottom', '37px'); } }; BootstrapTable.prototype.getData = function () { return (this.searchText || !$.isEmptyObject(this.filterColumns)) ? 
this.data : this.options.data; }; BootstrapTable.prototype.load = function (data) { // #431: support pagination if (this.options.sidePagination === 'server') { this.options.totalRows = data.total; data = data.rows; } this.initData(data); this.initSearch(); this.initPagination(); this.initBody(); }; BootstrapTable.prototype.append = function (data) { this.initData(data, 'append'); this.initSearch(); this.initPagination(); this.initBody(true); }; BootstrapTable.prototype.prepend = function (data) { this.initData(data, 'prepend'); this.initSearch(); this.initPagination(); this.initBody(true); }; BootstrapTable.prototype.remove = function (params) { var len = this.options.data.length, i, row; if (!params.hasOwnProperty('field') || !params.hasOwnProperty('values')) { return; } for (i = len - 1; i >= 0; i--) { row = this.options.data[i]; if (!row.hasOwnProperty(params.field)) { return; } if ($.inArray(row[params.field], params.values) !== -1) { this.options.data.splice(i, 1); } } if (len === this.options.data.length) { return; } this.initSearch(); this.initPagination(); this.initBody(true); }; BootstrapTable.prototype.insertRow = function (params) { if (!params.hasOwnProperty('index') || !params.hasOwnProperty('row')) { return; } this.data.splice(params.index, 0, params.row); this.initBody(true); }; BootstrapTable.prototype.updateRow = function (params) { if (!params.hasOwnProperty('index') || !params.hasOwnProperty('row')) { return; } $.extend(this.data[params.index], params.row); this.initBody(true); }; BootstrapTable.prototype.mergeCells = function (options) { var row = options.index, col = $.inArray(options.field, this.header.fields), rowspan = options.rowspan || 1, colspan = options.colspan || 1, i, j, $tr = this.$body.find('tr'), $td = $tr.eq(row).find('td').eq(col); if (row < 0 || col < 0 || row >= this.data.length) { return; } for (i = row; i < row + rowspan; i++) { for (j = col; j < col + colspan; j++) { $tr.eq(i).find('td').eq(j).hide(); } } $td.attr('rowspan', rowspan).attr('colspan', colspan).show(); }; BootstrapTable.prototype.getOptions = function () { return this.options; }; BootstrapTable.prototype.getSelections = function () { var that = this; return $.grep(this.data, function (row) { return row[that.header.stateField]; }); }; BootstrapTable.prototype.checkAll = function () { this.checkAll_(true); }; BootstrapTable.prototype.uncheckAll = function () { this.checkAll_(false); }; BootstrapTable.prototype.checkAll_ = function (checked) { var rows; if(!checked) { rows = this.getSelections(); } this.$selectItem.filter(':enabled').prop('checked', checked); this.updateRows(checked); this.updateSelected(); if(checked) { rows = this.getSelections(); } this.trigger(checked ? 'check-all' : 'uncheck-all', rows); }; BootstrapTable.prototype.check = function (index) { this.check_(true, index); }; BootstrapTable.prototype.uncheck = function (index) { this.check_(false, index); }; BootstrapTable.prototype.check_ = function (checked, index) { this.$selectItem.filter(sprintf('[data-index="%s"]', index)).prop('checked', checked); this.data[index][this.header.stateField] = checked; this.updateSelected(); this.trigger(checked ? 
'check' : 'uncheck', this.data[index]); }; BootstrapTable.prototype.checkBy = function (obj) { this.checkBy_(true, obj); }; BootstrapTable.prototype.uncheckBy = function (obj) { this.checkBy_(false, obj); }; BootstrapTable.prototype.checkBy_ = function (checked, obj) { if(!obj.hasOwnProperty('field') || !obj.hasOwnProperty('values')) { return; } var that = this; $.each(this.options.data, function (index, row) { if (!row.hasOwnProperty(obj.field)) { return false; } if ($.inArray(row[obj.field], obj.values) !== -1) { that.$selectItem.filter(sprintf('[data-index="%s"]', index)).prop('checked', checked); row[that.header.stateField] = checked; that.trigger(checked ? 'check' : 'uncheck', row); } }); this.updateSelected(); }; BootstrapTable.prototype.destroy = function () { this.$el.insertBefore(this.$container); $(this.options.toolbar).insertBefore(this.$el); this.$container.next().remove(); this.$container.remove(); this.$el.html(this.$el_.html()) .css('margin-top', '0') .attr('class', this.$el_.attr('class') || ''); // reset the class }; BootstrapTable.prototype.showLoading = function () { this.$loading.show(); }; BootstrapTable.prototype.hideLoading = function () { this.$loading.hide(); }; BootstrapTable.prototype.togglePagination = function () { this.options.pagination = !this.options.pagination; var button = this.$toolbar.find('button[name="paginationSwitch"] i'); if (this.options.pagination) { button.attr("class", this.options.iconsPrefix + " " + this.options.icons.paginationSwitchDown); } else { button.attr("class", this.options.iconsPrefix + " " + this.options.icons.paginationSwitchUp); } this.updatePagination(); }; BootstrapTable.prototype.refresh = function (params) { if (params && params.url) { this.options.url = params.url; this.options.pageNumber = 1; } this.initServer(params && params.silent, params && params.query); }; BootstrapTable.prototype.showColumn = function (field) { this.toggleColumn(getFieldIndex(this.options.columns, field), true, true); }; BootstrapTable.prototype.hideColumn = function (field) { this.toggleColumn(getFieldIndex(this.options.columns, field), false, true); }; BootstrapTable.prototype.filterBy = function (columns) { this.filterColumns = $.isEmptyObject(columns) ? {}: columns; this.options.pageNumber = 1; this.initSearch(); this.updatePagination(); }; BootstrapTable.prototype.scrollTo = function (value) { var $tbody = this.$container.find('.fixed-table-body'); if (typeof value === 'string') { value = value === 'bottom' ? 
$tbody[0].scrollHeight : 0; } if (typeof value === 'number') { $tbody.scrollTop(value); } }; BootstrapTable.prototype.selectPage = function (page) { if (page > 0 && page <= this.options.totalPages) { this.options.pageNumber = page; this.updatePagination(); } }; BootstrapTable.prototype.prevPage = function () { if (this.options.pageNumber > 1) { this.options.pageNumber--; this.updatePagination(); } }; BootstrapTable.prototype.nextPage = function () { if (this.options.pageNumber < this.options.totalPages) { this.options.pageNumber++; this.updatePagination(); } }; BootstrapTable.prototype.toggleView = function () { this.options.cardView = !this.options.cardView; this.initHeader(); this.initBody(); }; // BOOTSTRAP TABLE PLUGIN DEFINITION // ======================= var allowedMethods = [ 'getOptions', 'getSelections', 'getData', 'load', 'append', 'prepend', 'remove', 'insertRow', 'updateRow', 'mergeCells', 'checkAll', 'uncheckAll', 'check', 'uncheck', 'checkBy', 'uncheckBy', 'refresh', 'resetView', 'destroy', 'showLoading', 'hideLoading', 'showColumn', 'hideColumn', 'filterBy', 'scrollTo', 'selectPage', 'prevPage', 'nextPage', 'togglePagination', 'toggleView' ]; $.fn.bootstrapTable = function (option, _relatedTarget) { var value; this.each(function () { var $this = $(this), data = $this.data('bootstrap.table'), options = $.extend({}, BootstrapTable.DEFAULTS, $this.data(), typeof option === 'object' && option); if (typeof option === 'string') { if ($.inArray(option, allowedMethods) < 0) { throw "Unknown method: " + option; } if (!data) { return; } value = data[option](_relatedTarget); if (option === 'destroy') { $this.removeData('bootstrap.table'); } } if (!data) { $this.data('bootstrap.table', (data = new BootstrapTable(this, options))); } }); return typeof value === 'undefined' ? this : value; }; $.fn.bootstrapTable.Constructor = BootstrapTable; $.fn.bootstrapTable.defaults = BootstrapTable.DEFAULTS; $.fn.bootstrapTable.columnDefaults = BootstrapTable.COLUMN_DEFAULTS; $.fn.bootstrapTable.locales = BootstrapTable.LOCALES; $.fn.bootstrapTable.methods = allowedMethods; // BOOTSTRAP TABLE INIT // ======================= $(function () { $('[data-toggle="table"]').bootstrapTable(); }); }(jQuery);
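Usage sketch (not part of the library source above): a minimal, hypothetical page script showing how the plugin just defined is typically driven. It relies only on option names from BootstrapTable.DEFAULTS / COLUMN_DEFAULTS, method names from allowedMethods, and event names from BootstrapTable.EVENTS; the element id and sample rows are made-up placeholders.

// Assumes jQuery and this file are loaded, and a plain <table id="demoTable"></table> exists on the page.
$(function () {
    var $table = $('#demoTable');

    // Initialize through the jQuery plugin defined above.
    $table.bootstrapTable({
        pagination: true,
        search: true,
        columns: [
            {field: 'id', title: 'ID', sortable: true},
            {field: 'name', title: 'Name'}
        ],
        data: [
            {id: 1, name: 'first row'},
            {id: 2, name: 'second row'}
        ]
    });

    // Methods are dispatched by name via $.fn.bootstrapTable (see allowedMethods).
    $table.bootstrapTable('append', [{id: 3, name: 'appended row'}]);

    // Events are namespaced with .bs.table (see BootstrapTable.EVENTS).
    $table.on('click-row.bs.table', function (e, row, $element) {
        console.log('clicked row', row);
    });
});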
src/bootstrap-table.js
/** * @author zhixin wen <[email protected]> * version: 1.6.0 * https://github.com/wenzhixin/bootstrap-table/ */ !function ($) { 'use strict'; // TOOLS DEFINITION // ====================== // it only does '%s', and return '' when arguments are undefined var sprintf = function(str) { var args = arguments, flag = true, i = 1; str = str.replace(/%s/g, function () { var arg = args[i++]; if (typeof arg === 'undefined') { flag = false; return ''; } return arg; }); return flag ? str : ''; }; var getPropertyFromOther = function (list, from, to, value) { var result = ''; $.each(list, function (i, item) { if (item[from] === value) { result = item[to]; return false; } return true; }); return result; }; var getFieldIndex = function (columns, field) { var index = -1; $.each(columns, function (i, column) { if (column.field === field) { index = i; return false; } return true; }); return index; }; var getScrollBarWidth = function () { var inner = $('<p/>').addClass('fixed-table-scroll-inner'), outer = $('<div/>').addClass('fixed-table-scroll-outer'), w1, w2; outer.append(inner); $('body').append(outer); w1 = inner[0].offsetWidth; outer.css('overflow', 'scroll'); w2 = inner[0].offsetWidth; if (w1 === w2) { w2 = outer[0].clientWidth; } outer.remove(); return w1 - w2; }; var calculateObjectValue = function (self, name, args, defaultValue) { if (typeof name === 'string') { // support obj.func1.func2 var names = name.split('.'); if (names.length > 1) { name = window; $.each(names, function (i, f) { name = name[f]; }); } else { name = window[name]; } } if (typeof name === 'object') { return name; } if (typeof name === 'function') { return name.apply(self, args); } return defaultValue; }; var escapeHTML = function (text) { if (typeof text === 'string') { return text .replace(/&/g, "&amp;") .replace(/</g, "&lt;") .replace(/>/g, "&gt;") .replace(/"/g, "&quot;") .replace(/'/g, "&#039;"); } return text; }; // BOOTSTRAP TABLE CLASS DEFINITION // ====================== var BootstrapTable = function (el, options) { this.options = options; this.$el = $(el); this.$el_ = this.$el.clone(); this.timeoutId_ = 0; this.init(); }; BootstrapTable.DEFAULTS = { classes: 'table table-hover', height: undefined, undefinedText: '-', sortName: undefined, sortOrder: 'asc', striped: false, columns: [], data: [], method: 'get', url: undefined, cache: true, contentType: 'application/json', dataType: 'json', ajaxOptions: {}, queryParams: function (params) {return params;}, queryParamsType: 'limit', // undefined responseHandler: function (res) {return res;}, pagination: false, sidePagination: 'client', // client or server totalRows: 0, // server side need to set pageNumber: 1, pageSize: 10, pageList: [10, 25, 50, 100], search: false, searchAlign: 'right', selectItemName: 'btSelectItem', showHeader: true, showColumns: false, showPaginationSwitch: false, showRefresh: false, showToggle: false, buttonsAlign: 'right', smartDisplay: true, minimumCountColumns: 1, idField: undefined, cardView: false, trimOnSearch: true, clickToSelect: false, singleSelect: false, toolbar: undefined, toolbarAlign: 'left', checkboxHeader: true, sortable: true, maintainSelected: false, searchTimeOut: 500, iconSize: undefined, iconsPrefix: 'glyphicon', // glyphicon of fa (font awesome) icons: { paginationSwitchDown: 'glyphicon-collapse-down icon-chevron-down', paginationSwitchUp: 'glyphicon-collapse-up icon-chevron-up', refresh: 'glyphicon-refresh icon-refresh', toggle: 'glyphicon-list-alt icon-list-alt', columns: 'glyphicon-th icon-th' }, rowStyle: function (row, index) 
{return {};}, rowAttributes: function (row, index) {return {};}, onAll: function (name, args) {return false;}, onClickRow: function (item, $element) {return false;}, onDblClickRow: function (item, $element) {return false;}, onSort: function (name, order) {return false;}, onCheck: function (row) {return false;}, onUncheck: function (row) {return false;}, onCheckAll: function () {return false;}, onUncheckAll: function () {return false;}, onLoadSuccess: function (data) {return false;}, onLoadError: function (status) {return false;}, onColumnSwitch: function (field, checked) {return false;}, onPageChange: function (number, size) {return false;}, onSearch: function (text) {return false;}, onPreBody: function (data) {return false;}, onPostBody: function () {return false;}, onPostHeader: function() {return false;} }; BootstrapTable.LOCALES = []; BootstrapTable.LOCALES['en-US'] = { formatLoadingMessage: function () { return 'Loading, please wait...'; }, formatRecordsPerPage: function (pageNumber) { return sprintf('%s records per page', pageNumber); }, formatShowingRows: function (pageFrom, pageTo, totalRows) { return sprintf('Showing %s to %s of %s rows', pageFrom, pageTo, totalRows); }, formatSearch: function () { return 'Search'; }, formatNoMatches: function () { return 'No matching records found'; }, formatPaginationSwitch: function () { return 'Hide/Show pagination'; }, formatRefresh: function () { return 'Refresh'; }, formatToggle: function () { return 'Toggle'; }, formatColumns: function () { return 'Columns'; } }; $.extend(BootstrapTable.DEFAULTS, BootstrapTable.LOCALES['en-US']); BootstrapTable.COLUMN_DEFAULTS = { radio: false, checkbox: false, checkboxEnabled: true, field: undefined, title: undefined, 'class': undefined, align: undefined, // left, right, center halign: undefined, // left, right, center valign: undefined, // top, middle, bottom width: undefined, sortable: false, order: 'asc', // asc, desc visible: true, switchable: true, clickToSelect: true, formatter: undefined, events: undefined, sorter: undefined, cellStyle: undefined, searchable: true }; BootstrapTable.EVENTS = { 'all.bs.table': 'onAll', 'click-row.bs.table': 'onClickRow', 'dbl-click-row.bs.table': 'onDblClickRow', 'sort.bs.table': 'onSort', 'check.bs.table': 'onCheck', 'uncheck.bs.table': 'onUncheck', 'check-all.bs.table': 'onCheckAll', 'uncheck-all.bs.table': 'onUncheckAll', 'load-success.bs.table': 'onLoadSuccess', 'load-error.bs.table': 'onLoadError', 'column-switch.bs.table': 'onColumnSwitch', 'page-change.bs.table': 'onPageChange', 'search.bs.table': 'onSearch', 'pre-body.bs.table': 'onPreBody', 'post-body.bs.table': 'onPostBody', 'post-header.bs.table' : 'onPostHeader' }; BootstrapTable.prototype.init = function () { this.initContainer(); this.initTable(); this.initHeader(); this.initData(); this.initToolbar(); this.initPagination(); this.initBody(); this.initServer(); }; BootstrapTable.prototype.initContainer = function () { this.$container = $([ '<div class="bootstrap-table">', '<div class="fixed-table-toolbar"></div>', '<div class="fixed-table-container">', '<div class="fixed-table-header"><table></table></div>', '<div class="fixed-table-body">', '<div class="fixed-table-loading">', this.options.formatLoadingMessage(), '</div>', '</div>', '<div class="fixed-table-pagination"></div>', '</div>', '</div>'].join('')); this.$container.insertAfter(this.$el); this.$container.find('.fixed-table-body').append(this.$el); this.$container.after('<div class="clearfix"></div>'); this.$loading = 
this.$container.find('.fixed-table-loading'); this.$el.addClass(this.options.classes); if (this.options.striped) { this.$el.addClass('table-striped'); } }; BootstrapTable.prototype.initTable = function () { var that = this, columns = [], data = []; this.$header = this.$el.find('thead'); if (!this.$header.length) { this.$header = $('<thead></thead>').appendTo(this.$el); } if (!this.$header.find('tr').length) { this.$header.append('<tr></tr>'); } this.$header.find('th').each(function () { var column = $.extend({}, { title: $(this).html(), 'class': $(this).attr('class') }, $(this).data()); columns.push(column); }); this.options.columns = $.extend([], columns, this.options.columns); $.each(this.options.columns, function (i, column) { that.options.columns[i] = $.extend({}, BootstrapTable.COLUMN_DEFAULTS, {field: i}, column); // when field is undefined, use index instead }); // if options.data is setting, do not process tbody data if (this.options.data.length) { return; } this.$el.find('tbody tr').each(function () { var row = {}; // save tr's id and class row._id = $(this).attr('id'); row._class = $(this).attr('class'); $(this).find('td').each(function (i) { var field = that.options.columns[i].field; row[field] = $(this).html(); // save td's id and class row['_' + field + '_id'] = $(this).attr('id'); row['_' + field + '_class'] = $(this).attr('class'); }); data.push(row); }); this.options.data = data; }; BootstrapTable.prototype.initHeader = function () { var that = this, visibleColumns = [], html = []; this.header = { fields: [], styles: [], classes: [], formatters: [], events: [], sorters: [], cellStyles: [], clickToSelects: [], searchables: [] }; $.each(this.options.columns, function (i, column) { var text = '', halign = '', // header align style align = '', // body align style style = '', class_ = sprintf(' class="%s"', column['class']), order = that.options.sortOrder || column.order, searchable = true; if (!column.visible) { return; } halign = sprintf('text-align: %s; ', column.halign ? column.halign : column.align); align = sprintf('text-align: %s; ', column.align); style = sprintf('vertical-align: %s; ', column.valign); style += sprintf('width: %spx; ', column.checkbox || column.radio ? 36 : column.width); visibleColumns.push(column); that.header.fields.push(column.field); that.header.styles.push(align + style); that.header.classes.push(class_); that.header.formatters.push(column.formatter); that.header.events.push(column.events); that.header.sorters.push(column.sorter); that.header.cellStyles.push(column.cellStyle); that.header.clickToSelects.push(column.clickToSelect); that.header.searchables.push(column.searchable); html.push('<th', column.checkbox || column.radio ? sprintf(' class="bs-checkbox %s"', column['class'] || '') : class_, sprintf(' style="%s"', halign + style), '>'); html.push(sprintf('<div class="th-inner %s">', that.options.sortable && column.sortable ? 
'sortable' : '')); text = column.title; if (that.options.sortName === column.field && that.options.sortable && column.sortable) { text += that.getCaretHtml(); } if (column.checkbox) { if (!that.options.singleSelect && that.options.checkboxHeader) { text = '<input name="btSelectAll" type="checkbox" />'; } that.header.stateField = column.field; } if (column.radio) { text = ''; that.header.stateField = column.field; that.options.singleSelect = true; } html.push(text); html.push('</div>'); html.push('<div class="fht-cell"></div>'); html.push('</th>'); }); this.$header.find('tr').html(html.join('')); this.$header.find('th').each(function (i) { $(this).data(visibleColumns[i]); }); this.$container.off('click', 'th').on('click', 'th', function (event) { if (that.options.sortable && $(this).data().sortable) { that.onSort(event); } }); if (!this.options.showHeader || this.options.cardView) { this.$header.hide(); this.$container.find('.fixed-table-header').hide(); this.$loading.css('top', 0); } else { this.$header.show(); this.$container.find('.fixed-table-header').show(); this.$loading.css('top', '37px'); } this.$selectAll = this.$header.find('[name="btSelectAll"]'); this.$container.off('click', '[name="btSelectAll"]') .on('click', '[name="btSelectAll"]', function () { var checked = $(this).prop('checked'); that[checked ? 'checkAll' : 'uncheckAll'](); }); }; /** * @param data * @param type: append / prepend */ BootstrapTable.prototype.initData = function (data, type) { if (type === 'append') { this.data = this.data.concat(data); } else if (type === 'prepend') { this.data = [].concat(data).concat(this.data); } else { this.data = data || this.options.data; } this.options.data = this.data; if (this.options.sidePagination === 'server') { return; } this.initSort(); }; BootstrapTable.prototype.initSort = function () { var that = this, name = this.options.sortName, order = this.options.sortOrder === 'desc' ? -1 : 1, index = $.inArray(this.options.sortName, this.header.fields); if (index !== -1) { this.data.sort(function (a, b) { var aa = a[name], bb = b[name], value = calculateObjectValue(that.header, that.header.sorters[index], [aa, bb]); if (value !== undefined) { return order * value; } // Convert numerical values form string to float. if ($.isNumeric(aa)) { aa = parseFloat(aa); } if ($.isNumeric(bb)) { bb = parseFloat(bb); } // Fix #161: undefined or null string sort bug. if (aa === undefined || aa === null) { aa = ''; } if (bb === undefined || bb === null) { bb = ''; } if ($.isNumeric(aa) && $.isNumeric(bb)) { if (aa < bb) { return order * -1; } return order; } if (aa === bb) { return 0; } if (aa.localeCompare(bb) === -1) { return order * -1; } return order; }); } }; BootstrapTable.prototype.onSort = function (event) { var $this = $(event.currentTarget), $this_ = this.$header.find('th').eq($this.index()); this.$header.add(this.$header_).find('span.order').remove(); if (this.options.sortName === $this.data('field')) { this.options.sortOrder = this.options.sortOrder === 'asc' ? 'desc' : 'asc'; } else { this.options.sortName = $this.data('field'); this.options.sortOrder = $this.data('order') === 'asc' ? 
'desc' : 'asc'; } this.trigger('sort', this.options.sortName, this.options.sortOrder); $this.add($this_).data('order', this.options.sortOrder) .find('.th-inner').append(this.getCaretHtml()); if (this.options.sidePagination === 'server') { this.initServer(); return; } this.initSort(); this.initBody(); }; BootstrapTable.prototype.initToolbar = function () { var that = this, html = [], timeoutId = 0, $keepOpen, $search, switchableCount = 0; this.$toolbar = this.$container.find('.fixed-table-toolbar').html(''); if (typeof this.options.toolbar === 'string') { $(sprintf('<div class="bars pull-%s"></div>', this.options.toolbarAlign)) .appendTo(this.$toolbar) .append($(this.options.toolbar)); } // showColumns, showToggle, showRefresh html = [sprintf('<div class="columns columns-%s btn-group pull-%s">', this.options.buttonsAlign, this.options.buttonsAlign)]; if (typeof this.options.icons === 'string') { this.options.icons = calculateObjectValue(null, this.options.icons); } if (this.options.showPaginationSwitch) { html.push(sprintf('<button class="btn btn-default" type="button" name="paginationSwitch" title="%s">', this.options.formatPaginationSwitch()), sprintf('<i class="%s %s"></i>', this.options.iconsPrefix, this.options.icons.paginationSwitchDown), '</button>'); } if (this.options.showRefresh) { html.push(sprintf('<button class="btn btn-default' + (this.options.iconSize == undefined ? '' : ' btn-' + this.options.iconSize) + '" type="button" name="refresh" title="%s">', this.options.formatRefresh()), sprintf('<i class="%s %s"></i>', this.options.iconsPrefix, this.options.icons.refresh), '</button>'); } if (this.options.showToggle) { html.push(sprintf('<button class="btn btn-default' + (this.options.iconSize == undefined ? '' : ' btn-' + this.options.iconSize) + '" type="button" name="toggle" title="%s">', this.options.formatToggle()), sprintf('<i class="%s %s"></i>', this.options.iconsPrefix, this.options.icons.toggle), '</button>'); } if (this.options.showColumns) { html.push(sprintf('<div class="keep-open btn-group" title="%s">', this.options.formatColumns()), '<button type="button" class="btn btn-default' + (this.options.iconSize == undefined ? '' : ' btn-' + this.options.iconSize) + ' dropdown-toggle" data-toggle="dropdown">', sprintf('<i class="%s %s"></i>', this.options.iconsPrefix, this.options.icons.columns), ' <span class="caret"></span>', '</button>', '<ul class="dropdown-menu" role="menu">'); $.each(this.options.columns, function (i, column) { if (column.radio || column.checkbox) { return; } var checked = column.visible ? 
' checked="checked"' : ''; if (column.switchable) { html.push(sprintf('<li>' + '<label><input type="checkbox" data-field="%s" value="%s"%s> %s</label>' + '</li>', column.field, i, checked, column.title)); switchableCount++; } }); html.push('</ul>', '</div>'); } html.push('</div>'); // Fix #188: this.showToolbar is for extentions if (this.showToolbar || html.length > 2) { this.$toolbar.append(html.join('')); } if (this.options.showPaginationSwitch) { this.$toolbar.find('button[name="paginationSwitch"]') .off('click').on('click', $.proxy(this.togglePagination, this)); } if (this.options.showRefresh) { this.$toolbar.find('button[name="refresh"]') .off('click').on('click', $.proxy(this.refresh, this)); } if (this.options.showToggle) { this.$toolbar.find('button[name="toggle"]') .off('click').on('click', function () { that.options.cardView = !that.options.cardView; that.initHeader(); that.initBody(); }); } if (this.options.showColumns) { $keepOpen = this.$toolbar.find('.keep-open'); if (switchableCount <= this.options.minimumCountColumns) { $keepOpen.find('input').prop('disabled', true); } $keepOpen.find('li').off('click').on('click', function (event) { event.stopImmediatePropagation(); }); $keepOpen.find('input').off('click').on('click', function () { var $this = $(this); that.toggleColumn($this.val(), $this.prop('checked'), false); that.trigger('column-switch', $(this).data('field'), $this.prop('checked')); }); } if (this.options.search) { html = []; html.push( '<div class="pull-' + this.options.searchAlign + ' search">', sprintf('<input class="form-control' + (this.options.iconSize == undefined ? '' : ' input-' + this.options.iconSize) + '" type="text" placeholder="%s">', this.options.formatSearch()), '</div>'); this.$toolbar.append(html.join('')); $search = this.$toolbar.find('.search input'); $search.off('keyup').on('keyup', function (event) { clearTimeout(timeoutId); // doesn't matter if it's 0 timeoutId = setTimeout(function () { that.onSearch(event); }, that.options.searchTimeOut); }); } }; BootstrapTable.prototype.onSearch = function (event) { var text = $.trim($(event.currentTarget).val()); // trim search input if(this.options.trimOnSearch) { $(event.currentTarget).val(text); } if (text === this.searchText) { return; } this.searchText = text; this.options.pageNumber = 1; this.initSearch(); this.updatePagination(); this.trigger('search', text); }; BootstrapTable.prototype.initSearch = function () { var that = this; if (this.options.sidePagination !== 'server') { var s = this.searchText && this.searchText.toLowerCase(); var f = $.isEmptyObject(this.filterColumns) ? null: this.filterColumns; // Check filter this.data = f ? $.grep(this.options.data, function (item, i) { for (var key in f) { if (item[key] !== f[key]) { return false; } } return true; }) : this.options.data; this.data = s ? $.grep(this.data, function (item, i) { for (var key in item) { key = $.isNumeric(key) ? 
parseInt(key, 10) : key; var value = item[key]; // Fix #142: search use formated data value = calculateObjectValue(that.header, that.header.formatters[$.inArray(key, that.header.fields)], [value, item, i], value); var index = $.inArray(key, that.header.fields); if (index !== -1 && that.header.searchables[index] && (typeof value === 'string' || typeof value === 'number') && (value + '').toLowerCase().indexOf(s) !== -1) { return true; } } return false; }) : this.data; } }; BootstrapTable.prototype.initPagination = function () { this.$pagination = this.$container.find('.fixed-table-pagination'); if (!this.options.pagination) { this.$pagination.hide(); return; } else { this.$pagination.show(); } var that = this, html = [], i, from, to, $pageList, $first, $pre, $next, $last, $number, data = this.getData(); if (this.options.sidePagination !== 'server') { this.options.totalRows = data.length; } this.totalPages = 0; if (this.options.totalRows) { this.totalPages = ~~((this.options.totalRows - 1) / this.options.pageSize) + 1; this.options.totalPages = this.totalPages; } if (this.totalPages > 0 && this.options.pageNumber > this.totalPages) { this.options.pageNumber = this.totalPages; } this.pageFrom = (this.options.pageNumber - 1) * this.options.pageSize + 1; this.pageTo = this.options.pageNumber * this.options.pageSize; if (this.pageTo > this.options.totalRows) { this.pageTo = this.options.totalRows; } html.push( '<div class="pull-left pagination-detail">', '<span class="pagination-info">', this.options.formatShowingRows(this.pageFrom, this.pageTo, this.options.totalRows), '</span>'); html.push('<span class="page-list">'); var pageNumber = [ '<span class="btn-group dropup">', '<button type="button" class="btn btn-default '+ (this.options.iconSize == undefined ? '' : ' btn-' + this.options.iconSize)+ ' dropdown-toggle" data-toggle="dropdown">', '<span class="page-size">', this.options.pageSize, '</span>', ' <span class="caret"></span>', '</button>', '<ul class="dropdown-menu" role="menu">'], pageList = this.options.pageList; if (typeof this.options.pageList === 'string') { var list = this.options.pageList.replace('[', '').replace(']', '').replace(/ /g, '').split(','); pageList = []; $.each(list, function (i, value) { pageList.push(+value); }); } $.each(pageList, function (i, page) { if (!that.options.smartDisplay || i === 0 || pageList[i-1] <= that.options.totalRows) { var active = page === that.options.pageSize ? ' class="active"' : ''; pageNumber.push(sprintf('<li%s><a href="javascript:void(0)">%s</a></li>', active, page)); } }); pageNumber.push('</ul></span>'); html.push(this.options.formatRecordsPerPage(pageNumber.join(''))); html.push('</span>'); html.push('</div>', '<div class="pull-right pagination">', '<ul class="pagination' + (this.options.iconSize == undefined ? '' : ' pagination-' + this.options.iconSize) + '">', '<li class="page-first"><a href="javascript:void(0)">&lt;&lt;</a></li>', '<li class="page-pre"><a href="javascript:void(0)">&lt;</a></li>'); if (this.totalPages < 5) { from = 1; to = this.totalPages; } else { from = this.options.pageNumber - 2; to = from + 4; if (from < 1) { from = 1; to = 5; } if (to > this.totalPages) { to = this.totalPages; from = to - 4; } } for (i = from; i <= to; i++) { html.push('<li class="page-number' + (i === this.options.pageNumber ? 
' active' : '') + '">', '<a href="javascript:void(0)">', i ,'</a>', '</li>'); } html.push( '<li class="page-next"><a href="javascript:void(0)">&gt;</a></li>', '<li class="page-last"><a href="javascript:void(0)">&gt;&gt;</a></li>', '</ul>', '</div>'); this.$pagination.html(html.join('')); $pageList = this.$pagination.find('.page-list a'); $first = this.$pagination.find('.page-first'); $pre = this.$pagination.find('.page-pre'); $next = this.$pagination.find('.page-next'); $last = this.$pagination.find('.page-last'); $number = this.$pagination.find('.page-number'); if (this.options.pageNumber <= 1) { $first.addClass('disabled'); $pre.addClass('disabled'); } if (this.options.pageNumber >= this.totalPages) { $next.addClass('disabled'); $last.addClass('disabled'); } if (this.options.smartDisplay) { if (this.totalPages <= 1) { this.$pagination.find('div.pagination').hide(); } if (this.options.pageList.length < 2 || this.options.totalRows <= this.options.pageList[0]) { this.$pagination.find('span.page-list').hide(); } // when data is empty, hide the pagination this.$pagination[this.getData().length ? 'show' : 'hide'](); } $pageList.off('click').on('click', $.proxy(this.onPageListChange, this)); $first.off('click').on('click', $.proxy(this.onPageFirst, this)); $pre.off('click').on('click', $.proxy(this.onPagePre, this)); $next.off('click').on('click', $.proxy(this.onPageNext, this)); $last.off('click').on('click', $.proxy(this.onPageLast, this)); $number.off('click').on('click', $.proxy(this.onPageNumber, this)); }; BootstrapTable.prototype.updatePagination = function (event) { // Fix #171: IE disabled button can be clicked bug. if (event && $(event.currentTarget).hasClass('disabled')) { return; } if (!this.options.maintainSelected) { this.resetRows(); } this.initPagination(); if (this.options.sidePagination === 'server') { this.initServer(); } else { this.initBody(); } this.trigger('page-change', this.options.pageNumber, this.options.pageSize); }; BootstrapTable.prototype.onPageListChange = function (event) { var $this = $(event.currentTarget); $this.parent().addClass('active').siblings().removeClass('active'); this.options.pageSize = +$this.text(); this.$toolbar.find('.page-size').text(this.options.pageSize); this.updatePagination(event); }; BootstrapTable.prototype.onPageFirst = function (event) { this.options.pageNumber = 1; this.updatePagination(event); }; BootstrapTable.prototype.onPagePre = function (event) { this.options.pageNumber--; this.updatePagination(event); }; BootstrapTable.prototype.onPageNext = function (event) { this.options.pageNumber++; this.updatePagination(event); }; BootstrapTable.prototype.onPageLast = function (event) { this.options.pageNumber = this.totalPages; this.updatePagination(event); }; BootstrapTable.prototype.onPageNumber = function (event) { if (this.options.pageNumber === +$(event.currentTarget).text()) { return; } this.options.pageNumber = +$(event.currentTarget).text(); this.updatePagination(event); }; BootstrapTable.prototype.initBody = function (fixedScroll) { var that = this, html = [], data = this.getData(); this.trigger('pre-body', data); this.$body = this.$el.find('tbody'); if (!this.$body.length) { this.$body = $('<tbody></tbody>').appendTo(this.$el); } //Fix #389 Bootstrap-table-flatJSON is not working if (!this.options.pagination || this.options.sidePagination === 'server') { this.pageFrom = 1; this.pageTo = data.length; } for (var i = this.pageFrom - 1; i < this.pageTo; i++) { var item = data[i], style = {}, csses = [], attributes = {}, 
htmlAttributes = []; style = calculateObjectValue(this.options, this.options.rowStyle, [item, i], style); if (style && style.css) { for (var key in style.css) { csses.push(key + ': ' + style.css[key]); } } attributes = calculateObjectValue(this.options, this.options.rowAttributes, [item, i], attributes); if (attributes) { for (var key in attributes) { htmlAttributes.push(sprintf('%s="%s"', key, escapeHTML(attributes[key]))); } } html.push('<tr', sprintf(' %s', htmlAttributes.join(' ')), sprintf(' id="%s"', $.isArray(item) ? undefined : item._id), sprintf(' class="%s"', style.classes || ($.isArray(item) ? undefined : item._class)), sprintf(' data-index="%s"', i), '>' ); if (this.options.cardView) { html.push(sprintf('<td colspan="%s">', this.header.fields.length)); } $.each(this.header.fields, function (j, field) { var text = '', value = item[field], type = '', cellStyle = {}, id_ = '', class_ = that.header.classes[j], column = that.options.columns[getFieldIndex(that.options.columns, field)]; style = sprintf('style="%s"', csses.concat(that.header.styles[j]).join('; ')); value = calculateObjectValue(that.header, that.header.formatters[j], [value, item, i], value); // handle td's id and class if (item['_' + field + '_id']) { id_ = sprintf(' id="%s"', item['_' + field + '_id']); } if (item['_' + field + '_class']) { class_ = sprintf(' class="%s"', item['_' + field + '_class']); } cellStyle = calculateObjectValue(that.header, that.header.cellStyles[j], [value, item, i], cellStyle); if (cellStyle.classes) { class_ = sprintf(' class="%s"', cellStyle.classes); } if (cellStyle.css) { var csses_ = []; for (var key in cellStyle.css) { csses_.push(key + ': ' + cellStyle.css[key]); } style = sprintf('style="%s"', csses_.concat(that.header.styles[j]).join('; ')); } if (column.checkbox || column.radio) { type = column.checkbox ? 'checkbox' : type; type = column.radio ? 'radio' : type; text = [that.options.cardView ? '<div class="card-view">' : '<td class="bs-checkbox">', '<input' + sprintf(' data-index="%s"', i) + sprintf(' name="%s"', that.options.selectItemName) + sprintf(' type="%s"', type) + sprintf(' value="%s"', item[that.options.idField]) + sprintf(' checked="%s"', value === true || (value && value.checked) ? 'checked' : undefined) + sprintf(' disabled="%s"', !column.checkboxEnabled || (value && value.disabled) ? 'disabled' : undefined) + ' />', that.options.cardView ? '</div>' : '</td>'].join(''); } else { value = typeof value === 'undefined' || value === null ? that.options.undefinedText : value; text = that.options.cardView ? ['<div class="card-view">', that.options.showHeader ? sprintf('<span class="title" %s>%s</span>', style, getPropertyFromOther(that.options.columns, 'field', 'title', field)) : '', sprintf('<span class="value">%s</span>', value), '</div>'].join('') : [sprintf('<td%s %s %s>', id_, class_, style), value, '</td>'].join(''); // Hide empty data on Card view when smartDisplay is set to true. 
if (that.options.cardView && that.options.smartDisplay && value === '') { text = ''; } } html.push(text); }); if (this.options.cardView) { html.push('</td>'); } html.push('</tr>'); } // show no records if (!html.length) { html.push('<tr class="no-records-found">', sprintf('<td colspan="%s">%s</td>', this.header.fields.length, this.options.formatNoMatches()), '</tr>'); } this.$body.html(html.join('')); if (!fixedScroll) { this.scrollTo(0); } // click to select by column this.$body.find('> tr > td').off('click').on('click', function () { var $tr = $(this).parent(); that.trigger('click-row', that.data[$tr.data('index')], $tr); // if click to select - then trigger the checkbox/radio click if (that.options.clickToSelect) { if (that.header.clickToSelects[$tr.children().index($(this))]) { $tr.find(sprintf('[name="%s"]', that.options.selectItemName))[0].click(); // #144: .trigger('click') bug } } }); this.$body.find('tr').off('dblclick').on('dblclick', function () { that.trigger('dbl-click-row', that.data[$(this).data('index')], $(this)); }); this.$selectItem = this.$body.find(sprintf('[name="%s"]', this.options.selectItemName)); this.$selectItem.off('click').on('click', function (event) { event.stopImmediatePropagation(); var checked = $(this).prop('checked'), row = that.data[$(this).data('index')]; row[that.header.stateField] = checked; that.trigger(checked ? 'check' : 'uncheck', row); if (that.options.singleSelect) { that.$selectItem.not(this).each(function () { that.data[$(this).data('index')][that.header.stateField] = false; }); that.$selectItem.filter(':checked').not(this).prop('checked', false); } that.updateSelected(); }); $.each(this.header.events, function (i, events) { if (!events) { return; } // fix bug, if events is defined with namespace if (typeof events === 'string') { events = calculateObjectValue(null, events); } for (var key in events) { that.$body.find('tr').each(function () { var $tr = $(this), $td = $tr.find(that.options.cardView ? '.card-view' : 'td').eq(i), index = key.indexOf(' '), name = key.substring(0, index), el = key.substring(index + 1), func = events[key]; $td.find(el).off(name).on(name, function (e) { var index = $tr.data('index'), row = that.data[index], value = row[that.header.fields[i]]; func.apply(this, [e, value, row, index]); }); }); } }); this.updateSelected(); this.resetView(); this.trigger('post-body'); }; BootstrapTable.prototype.initServer = function (silent, query) { var that = this, data = {}, params = { pageSize: this.options.pageSize, pageNumber: this.options.pageNumber, searchText: this.searchText, sortName: this.options.sortName, sortOrder: this.options.sortOrder }; if (!this.options.url) { return; } if (this.options.queryParamsType === 'limit') { params = { search: params.searchText, sort: params.sortName, order: params.sortOrder }; if (this.options.pagination) { params.limit = this.options.pageSize; params.offset = this.options.pageSize * (this.options.pageNumber - 1); } } data = calculateObjectValue(this.options, this.options.queryParams, [params], data); $.extend(data, query || {}); // false to stop request if (data === false) { return; } if (!silent) { this.$loading.show(); } $.ajax($.extend({}, calculateObjectValue(null, this.options.ajaxOptions), { type: this.options.method, url: this.options.url, data: this.options.contentType === 'application/json' && this.options.method === 'post' ? 
JSON.stringify(data): data, cache: this.options.cache, contentType: this.options.contentType, dataType: this.options.dataType, success: function (res) { res = calculateObjectValue(that.options, that.options.responseHandler, [res], res); that.load(res); that.trigger('load-success', res); }, error: function (res) { that.trigger('load-error', res.status); }, complete: function () { if (!silent) { that.$loading.hide(); } } })); }; BootstrapTable.prototype.getCaretHtml = function () { return ['<span class="order' + (this.options.sortOrder === 'desc' ? '' : ' dropup') + '">', '<span class="caret" style="margin: 10px 5px;"></span>', '</span>'].join(''); }; BootstrapTable.prototype.updateSelected = function () { var checkAll = this.$selectItem.filter(':enabled').length === this.$selectItem.filter(':enabled').filter(':checked').length; this.$selectAll.add(this.$selectAll_).prop('checked', checkAll); this.$selectItem.each(function () { $(this).parents('tr')[$(this).prop('checked') ? 'addClass' : 'removeClass']('selected'); }); }; BootstrapTable.prototype.updateRows = function (checked) { var that = this; this.$selectItem.each(function () { that.data[$(this).data('index')][that.header.stateField] = checked; }); }; BootstrapTable.prototype.resetRows = function () { var that = this; $.each(this.data, function (i, row) { that.$selectAll.prop('checked', false); that.$selectItem.prop('checked', false); row[that.header.stateField] = false; }); }; BootstrapTable.prototype.trigger = function (name) { var args = Array.prototype.slice.call(arguments, 1); name += '.bs.table'; this.options[BootstrapTable.EVENTS[name]].apply(this.options, args); this.$el.trigger($.Event(name), args); this.options.onAll(name, args); this.$el.trigger($.Event('all.bs.table'), [name, args]); }; BootstrapTable.prototype.resetHeader = function () { var that = this, $fixedHeader = this.$container.find('.fixed-table-header'), $fixedBody = this.$container.find('.fixed-table-body'), scrollWidth = this.$el.width() > $fixedBody.width() ? getScrollBarWidth() : 0; // fix #61: the hidden table reset header bug. 
if (this.$el.is(':hidden')) { clearTimeout(this.timeoutId_); // doesn't matter if it's 0 this.timeoutId_ = setTimeout($.proxy(this.resetHeader, this), 100); // 100ms return; } this.$header_ = this.$header.clone(true, true); this.$selectAll_ = this.$header_.find('[name="btSelectAll"]'); // fix bug: get $el.css('width') error sometime (height = 500) setTimeout(function () { $fixedHeader.css({ 'height': '37px', 'border-bottom': '1px solid #dddddd', 'margin-right': scrollWidth }).find('table').css('width', that.$el.css('width')) .html('').attr('class', that.$el.attr('class')) .append(that.$header_); // fix bug: $.data() is not working as expected after $.append() that.$header.find('th').each(function (i) { that.$header_.find('th').eq(i).data($(this).data()); }); that.$body.find('tr:first-child:not(.no-records-found) > *').each(function(i) { that.$header_.find('div.fht-cell').eq(i).width($(this).innerWidth()); }); that.$el.css('margin-top', -that.$header.height()); // horizontal scroll event $fixedBody.off('scroll').on('scroll', function () { $fixedHeader.scrollLeft($(this).scrollLeft()); }); that.trigger('post-header'); }); }; BootstrapTable.prototype.toggleColumn = function (index, checked, needUpdate) { if (index === -1) { return; } this.options.columns[index].visible = checked; this.initHeader(); this.initSearch(); this.initPagination(); this.initBody(); if (this.options.showColumns) { var $items = this.$toolbar.find('.keep-open input').prop('disabled', false); if (needUpdate) { $items.filter(sprintf('[value="%s"]', index)).prop('checked', checked); } if ($items.filter(':checked').length <= this.options.minimumCountColumns) { $items.filter(':checked').prop('disabled', true); } } }; // PUBLIC FUNCTION DEFINITION // ======================= BootstrapTable.prototype.resetView = function (params) { var that = this, header = this.header; if (params && params.height) { this.options.height = params.height; } this.$selectAll.prop('checked', this.$selectItem.length > 0 && this.$selectItem.length === this.$selectItem.filter(':checked').length); if (this.options.height) { var toolbarHeight = +this.$toolbar.children().outerHeight(true), paginationHeight = +this.$pagination.children().outerHeight(true), height = this.options.height - toolbarHeight - paginationHeight; this.$container.find('.fixed-table-container').css('height', height + 'px'); } if (this.options.cardView) { // remove the element css that.$el.css('margin-top', '0'); that.$container.find('.fixed-table-container').css('padding-bottom', '0'); return; } if (this.options.showHeader && this.options.height) { this.resetHeader(); } else { this.trigger('post-header'); } if (this.options.height && this.options.showHeader) { this.$container.find('.fixed-table-container').css('padding-bottom', '37px'); } }; BootstrapTable.prototype.getData = function () { return (this.searchText || !$.isEmptyObject(this.filterColumns)) ? 
this.data : this.options.data; }; BootstrapTable.prototype.load = function (data) { // #431: support pagination if (this.options.sidePagination === 'server') { this.options.totalRows = data.total; data = data.rows; } this.initData(data); this.initSearch(); this.initPagination(); this.initBody(); }; BootstrapTable.prototype.append = function (data) { this.initData(data, 'append'); this.initSearch(); this.initPagination(); this.initBody(true); }; BootstrapTable.prototype.prepend = function (data) { this.initData(data, 'prepend'); this.initSearch(); this.initPagination(); this.initBody(true); }; BootstrapTable.prototype.remove = function (params) { var len = this.options.data.length, i, row; if (!params.hasOwnProperty('field') || !params.hasOwnProperty('values')) { return; } for (i = len - 1; i >= 0; i--) { row = this.options.data[i]; if (!row.hasOwnProperty(params.field)) { return; } if ($.inArray(row[params.field], params.values) !== -1) { this.options.data.splice(i, 1); } } if (len === this.options.data.length) { return; } this.initSearch(); this.initPagination(); this.initBody(true); }; BootstrapTable.prototype.insertRow = function (params) { if (!params.hasOwnProperty('index') || !params.hasOwnProperty('row')) { return; } this.data.splice(params.index, 0, params.row); this.initBody(true); }; BootstrapTable.prototype.updateRow = function (params) { if (!params.hasOwnProperty('index') || !params.hasOwnProperty('row')) { return; } $.extend(this.data[params.index], params.row); this.initBody(true); }; BootstrapTable.prototype.mergeCells = function (options) { var row = options.index, col = $.inArray(options.field, this.header.fields), rowspan = options.rowspan || 1, colspan = options.colspan || 1, i, j, $tr = this.$body.find('tr'), $td = $tr.eq(row).find('td').eq(col); if (row < 0 || col < 0 || row >= this.data.length) { return; } for (i = row; i < row + rowspan; i++) { for (j = col; j < col + colspan; j++) { $tr.eq(i).find('td').eq(j).hide(); } } $td.attr('rowspan', rowspan).attr('colspan', colspan).show(); }; BootstrapTable.prototype.getOptions = function () { return this.options; }; BootstrapTable.prototype.getSelections = function () { var that = this; return $.grep(this.data, function (row) { return row[that.header.stateField]; }); }; BootstrapTable.prototype.checkAll = function () { this.checkAll_(true); }; BootstrapTable.prototype.uncheckAll = function () { this.checkAll_(false); }; BootstrapTable.prototype.checkAll_ = function (checked) { var rows; if(!checked) { rows = this.getSelections(); } this.$selectItem.filter(':enabled').prop('checked', checked); this.updateRows(checked); this.updateSelected(); if(checked) { rows = this.getSelections(); } this.trigger(checked ? 'check-all' : 'uncheck-all', rows); }; BootstrapTable.prototype.check = function (index) { this.check_(true, index); }; BootstrapTable.prototype.uncheck = function (index) { this.check_(false, index); }; BootstrapTable.prototype.check_ = function (checked, index) { this.$selectItem.filter(sprintf('[data-index="%s"]', index)).prop('checked', checked); this.data[index][this.header.stateField] = checked; this.updateSelected(); this.trigger(checked ? 
'check' : 'uncheck', this.data[index]); }; BootstrapTable.prototype.checkBy = function (obj) { this.checkBy_(true, obj); }; BootstrapTable.prototype.uncheckBy = function (obj) { this.checkBy_(false, obj); }; BootstrapTable.prototype.checkBy_ = function (checked, obj) { if(!obj.hasOwnProperty('field') || !obj.hasOwnProperty('values')) { return; } var that = this; $.each(this.data, function (index, row) { if(!row.hasOwnProperty(obj.field)) { return false; } if($.inArray(row[obj.field], obj.values) != -1) { that.$selectItem.filter(sprintf('[data-index="%s"]', index)).prop('checked', checked); row[that.header.stateField] = checked; that.trigger(checked ? 'check' : 'uncheck', row); } }); this.updateSelected(); }; BootstrapTable.prototype.destroy = function () { this.$el.insertBefore(this.$container); $(this.options.toolbar).insertBefore(this.$el); this.$container.next().remove(); this.$container.remove(); this.$el.html(this.$el_.html()) .css('margin-top', '0') .attr('class', this.$el_.attr('class') || ''); // reset the class }; BootstrapTable.prototype.showLoading = function () { this.$loading.show(); }; BootstrapTable.prototype.hideLoading = function () { this.$loading.hide(); }; BootstrapTable.prototype.togglePagination = function () { this.options.pagination = !this.options.pagination; var button = this.$toolbar.find('button[name="paginationSwitch"] i'); if (this.options.pagination) { button.attr("class", this.options.iconsPrefix + " " + this.options.icons.paginationSwitchDown); } else { button.attr("class", this.options.iconsPrefix + " " + this.options.icons.paginationSwitchUp); } this.updatePagination(); }; BootstrapTable.prototype.refresh = function (params) { if (params && params.url) { this.options.url = params.url; this.options.pageNumber = 1; } this.initServer(params && params.silent, params && params.query); }; BootstrapTable.prototype.showColumn = function (field) { this.toggleColumn(getFieldIndex(this.options.columns, field), true, true); }; BootstrapTable.prototype.hideColumn = function (field) { this.toggleColumn(getFieldIndex(this.options.columns, field), false, true); }; BootstrapTable.prototype.filterBy = function (columns) { this.filterColumns = $.isEmptyObject(columns) ? {}: columns; this.options.pageNumber = 1; this.initSearch(); this.updatePagination(); }; BootstrapTable.prototype.scrollTo = function (value) { var $tbody = this.$container.find('.fixed-table-body'); if (typeof value === 'string') { value = value === 'bottom' ? 
$tbody[0].scrollHeight : 0; } if (typeof value === 'number') { $tbody.scrollTop(value); } }; BootstrapTable.prototype.selectPage = function (page) { if (page > 0 && page <= this.options.totalPages) { this.options.pageNumber = page; this.updatePagination(); } }; BootstrapTable.prototype.prevPage = function () { if (this.options.pageNumber > 1) { this.options.pageNumber--; this.updatePagination(); } }; BootstrapTable.prototype.nextPage = function () { if (this.options.pageNumber < this.options.totalPages) { this.options.pageNumber++; this.updatePagination(); } }; BootstrapTable.prototype.toggleView = function () { this.options.cardView = !this.options.cardView; this.initHeader(); this.initBody(); }; // BOOTSTRAP TABLE PLUGIN DEFINITION // ======================= var allowedMethods = [ 'getOptions', 'getSelections', 'getData', 'load', 'append', 'prepend', 'remove', 'insertRow', 'updateRow', 'mergeCells', 'checkAll', 'uncheckAll', 'check', 'uncheck', 'checkBy', 'uncheckBy', 'refresh', 'resetView', 'destroy', 'showLoading', 'hideLoading', 'showColumn', 'hideColumn', 'filterBy', 'scrollTo', 'selectPage', 'prevPage', 'nextPage', 'togglePagination', 'toggleView' ]; $.fn.bootstrapTable = function (option, _relatedTarget) { var value; this.each(function () { var $this = $(this), data = $this.data('bootstrap.table'), options = $.extend({}, BootstrapTable.DEFAULTS, $this.data(), typeof option === 'object' && option); if (typeof option === 'string') { if ($.inArray(option, allowedMethods) < 0) { throw "Unknown method: " + option; } if (!data) { return; } value = data[option](_relatedTarget); if (option === 'destroy') { $this.removeData('bootstrap.table'); } } if (!data) { $this.data('bootstrap.table', (data = new BootstrapTable(this, options))); } }); return typeof value === 'undefined' ? this : value; }; $.fn.bootstrapTable.Constructor = BootstrapTable; $.fn.bootstrapTable.defaults = BootstrapTable.DEFAULTS; $.fn.bootstrapTable.columnDefaults = BootstrapTable.COLUMN_DEFAULTS; $.fn.bootstrapTable.locales = BootstrapTable.LOCALES; $.fn.bootstrapTable.methods = allowedMethods; // BOOTSTRAP TABLE INIT // ======================= $(function () { $('[data-toggle="table"]').bootstrapTable(); }); }(jQuery);
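The plugin definition above exposes BootstrapTable through $.fn.bootstrapTable and whitelists its public methods in allowedMethods. A minimal usage sketch follows, assuming jQuery and this plugin are loaded on a page containing an empty <table id="table"></table>; the column fields and row data are invented for illustration and are not part of the project.

$(function () {
    // Build the table from options; 'columns', 'data', 'pagination' and 'pageSize'
    // are options read by the code above (this.options.columns, this.options.data, ...).
    $('#table').bootstrapTable({
        columns: [
            { field: 'id', title: 'ID' },
            { field: 'name', title: 'Name' }
        ],
        data: [
            { id: 1, name: 'alpha' },
            { id: 2, name: 'beta' }
        ],
        pagination: true,
        pageSize: 10
    });

    // Methods are dispatched through the same plugin call; only names listed in
    // allowedMethods are accepted, otherwise an "Unknown method" error is thrown.
    var selected = $('#table').bootstrapTable('getSelections');
    $('#table').bootstrapTable('resetView');
});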
Update checkBy/uncheckBy methods.
src/bootstrap-table.js
Update checkBy/uncheckBy methods.
<ide><path>rc/bootstrap-table.js <ide> } <ide> <ide> var that = this; <del> $.each(this.data, function (index, row) { <del> if(!row.hasOwnProperty(obj.field)) { <add> $.each(this.options.data, function (index, row) { <add> if (!row.hasOwnProperty(obj.field)) { <ide> return false; <ide> } <del> if($.inArray(row[obj.field], obj.values) != -1) { <add> if ($.inArray(row[obj.field], obj.values) !== -1) { <ide> that.$selectItem.filter(sprintf('[data-index="%s"]', index)).prop('checked', checked); <ide> row[that.header.stateField] = checked; <ide> that.trigger(checked ? 'check' : 'uncheck', row);
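The diff above makes checkBy_ iterate this.options.data and tightens the comparisons. A hedged sketch of how the affected methods are called through the plugin wrapper follows; the field name 'id' and the values are example data, not taken from the project.

// checkBy / uncheckBy receive a single object with 'field' and 'values', which
// $.fn.bootstrapTable forwards to BootstrapTable.prototype.checkBy / uncheckBy.
$('#table').bootstrapTable('checkBy', { field: 'id', values: [2, 4] });  // check matching rows
$('#table').bootstrapTable('uncheckBy', { field: 'id', values: [2] });   // uncheck matching rows

// Each matching row fires 'check.bs.table' or 'uncheck.bs.table', mirroring the
// that.trigger(checked ? 'check' : 'uncheck', row) call kept by the diff.
$('#table').on('check.bs.table uncheck.bs.table', function (e, row) {
    console.log(e.type, row);
});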
Java
apache-2.0
83d28b375432ca818a1e0e684b11f4f069d64b51
0
alibaba/java-dns-cache-manipulator,alibaba/java-dns-cache-manipulator,alibaba/java-dns-cache-manipulator,alibaba/java-dns-cache-manipulator
package com.alibaba.dcm.internal; import com.alibaba.dcm.DnsCache; import com.alibaba.dcm.DnsCacheEntry; import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import static com.alibaba.dcm.internal.InetAddressCacheUtilCommons.NEVER_EXPIRATION; import static com.alibaba.dcm.internal.InetAddressCacheUtilCommons.toInetAddressArray; /** * Util class to manipulate dns cache for {@code JDK 8-}. * <p> * dns cache is {@link InetAddress.Cache#cache} in {@link InetAddress#addressCache}. * <p> * <b>Caution</b>: <br> * Manipulation on {@link InetAddress#addressCache} <strong>MUST</strong> * be guarded by {@link InetAddress#addressCache} to avoid multithreading problem, * you can see the implementation of {@link InetAddress} to confirm this * (<b><i>See Also</i></b> lists key code of {@link InetAddress} related to this point). * * @author Jerry Lee (oldratlee at gmail dot com) * @see InetAddress * @see InetAddress#addressCache * @see InetAddress.CacheEntry * @see InetAddress#cacheInitIfNeeded() * @see InetAddress#cacheAddresses(String, InetAddress[], boolean) */ public final class InetAddressCacheUtilForJava8Minus { /** * Need convert host to lowercase, see {@link InetAddress#cacheAddresses(String, InetAddress[], boolean)}. */ public static void setInetAddressCache(String host, String[] ips, long expireMillis) throws UnknownHostException, IllegalAccessException, InstantiationException, InvocationTargetException, ClassNotFoundException, NoSuchFieldException { host = host.toLowerCase(); long expiration = expireMillis == NEVER_EXPIRATION ? 
NEVER_EXPIRATION : System.currentTimeMillis() + expireMillis; Object entry = newCacheEntry(host, ips, expiration); synchronized (getAddressCacheOfInetAddress()) { getCache().put(host, entry); getNegativeCache().remove(host); } } private static Object newCacheEntry(String host, String[] ips, long expiration) throws UnknownHostException, ClassNotFoundException, IllegalAccessException, InvocationTargetException, InstantiationException { // InetAddress.CacheEntry has only one constructor return getConstructorOfInetAddress$CacheEntry().newInstance(toInetAddressArray(host, ips), expiration); } /** * {@link InetAddress.CacheEntry#CacheEntry} */ private static volatile Constructor<?> constructorOfInetAddress$CacheEntry = null; private static Constructor<?> getConstructorOfInetAddress$CacheEntry() throws ClassNotFoundException { if (constructorOfInetAddress$CacheEntry != null) { return constructorOfInetAddress$CacheEntry; } synchronized (InetAddressCacheUtilCommons.class) { if (constructorOfInetAddress$CacheEntry != null) { // double check return constructorOfInetAddress$CacheEntry; } final String className = "java.net.InetAddress$CacheEntry"; final Class<?> clazz = Class.forName(className); // InetAddress.CacheEntry has only one constructor: // - for jdk 6, constructor signature is CacheEntry(Object address, long expiration) // - for jdk 7/8, constructor signature is CacheEntry(InetAddress[] addresses, long expiration) // // code in jdk 6: // https://hg.openjdk.java.net/jdk6/jdk6/jdk/file/8deef18bb749/src/share/classes/java/net/InetAddress.java#l739 // code in jdk 7: // https://hg.openjdk.java.net/jdk7u/jdk7u/jdk/file/4dd5e486620d/src/share/classes/java/net/InetAddress.java#l742 // code in jdk 8: // https://hg.openjdk.java.net/jdk8u/jdk8u/jdk/file/45e4e636b757/src/share/classes/java/net/InetAddress.java#l748 final Constructor<?> constructor = clazz.getDeclaredConstructors()[0]; constructor.setAccessible(true); constructorOfInetAddress$CacheEntry = constructor; return constructor; } } public static void removeInetAddressCache(String host) throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { host = host.toLowerCase(); synchronized (getAddressCacheOfInetAddress()) { getCache().remove(host); getNegativeCache().remove(host); } } /** * @return {@link InetAddress.Cache#cache} in {@link InetAddress#addressCache} */ @GuardedBy("getAddressCacheOfInetAddress()") private static Map<String, Object> getCache() throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { return getCacheOfInetAddress$Cache0(getAddressCacheOfInetAddress()); } /** * @return {@link InetAddress.Cache#cache} in {@link InetAddress#negativeCache} */ @GuardedBy("getAddressCacheOfInetAddress()") private static Map<String, Object> getNegativeCache() throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { return getCacheOfInetAddress$Cache0(getNegativeCacheOfInetAddress()); } /** * {@link InetAddress.Cache.cache} */ private static volatile Field cacheMapFieldOfInetAddress$Cache = null; @SuppressWarnings("unchecked") private static Map<String, Object> getCacheOfInetAddress$Cache0(Object inetAddressCache) throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { if (cacheMapFieldOfInetAddress$Cache == null) { synchronized (InetAddressCacheUtilForJava8Minus.class) { if (cacheMapFieldOfInetAddress$Cache == null) { // double check final Class<?> clazz = Class.forName("java.net.InetAddress$Cache"); final Field f = clazz.getDeclaredField("cache"); 
f.setAccessible(true); cacheMapFieldOfInetAddress$Cache = f; } } } return (Map<String, Object>) cacheMapFieldOfInetAddress$Cache.get(inetAddressCache); } /** * @return {@link InetAddress#addressCache} */ private static Object getAddressCacheOfInetAddress() throws NoSuchFieldException, IllegalAccessException { return getAddressCacheAndNegativeCacheOfInetAddress0()[0]; } /** * @return {@link InetAddress#negativeCache} */ private static Object getNegativeCacheOfInetAddress() throws NoSuchFieldException, IllegalAccessException { return getAddressCacheAndNegativeCacheOfInetAddress0()[1]; } private static volatile Object[] ADDRESS_CACHE_AND_NEGATIVE_CACHE = null; /** * @return {@link InetAddress#addressCache} and {@link InetAddress#negativeCache} */ private static Object[] getAddressCacheAndNegativeCacheOfInetAddress0() throws NoSuchFieldException, IllegalAccessException { if (ADDRESS_CACHE_AND_NEGATIVE_CACHE == null) { synchronized (InetAddressCacheUtilForJava8Minus.class) { if (ADDRESS_CACHE_AND_NEGATIVE_CACHE == null) { // double check final Field cacheField = InetAddress.class.getDeclaredField("addressCache"); cacheField.setAccessible(true); final Field negativeCacheField = InetAddress.class.getDeclaredField("negativeCache"); negativeCacheField.setAccessible(true); ADDRESS_CACHE_AND_NEGATIVE_CACHE = new Object[]{ cacheField.get(InetAddress.class), negativeCacheField.get(InetAddress.class) }; } } } return ADDRESS_CACHE_AND_NEGATIVE_CACHE; } @Nullable public static DnsCacheEntry getInetAddressCache(String host) throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { host = host.toLowerCase(); final Object cacheEntry; synchronized (getAddressCacheOfInetAddress()) { cacheEntry = getCache().get(host); } if (null == cacheEntry) return null; final DnsCacheEntry dnsCacheEntry = inetAddress$CacheEntry2DnsCacheEntry(host, cacheEntry); if (isDnsCacheEntryExpired(dnsCacheEntry.getHost())) return null; return dnsCacheEntry; } private static boolean isDnsCacheEntryExpired(String host) { return null == host || "0.0.0.0".equals(host); } public static DnsCache listInetAddressCache() throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { final Map<String, Object> cache; final Map<String, Object> negativeCache; synchronized (getAddressCacheOfInetAddress()) { cache = new HashMap<String, Object>(getCache()); negativeCache = new HashMap<String, Object>(getNegativeCache()); } return new DnsCache(convert(cache), convert(negativeCache)); } private static List<DnsCacheEntry> convert(Map<String, Object> cache) throws IllegalAccessException { final List<DnsCacheEntry> ret = new ArrayList<DnsCacheEntry>(); for (Map.Entry<String, Object> entry : cache.entrySet()) { final String host = entry.getKey(); if (isDnsCacheEntryExpired(host)) { // exclude expired entries! 
continue; } ret.add(inetAddress$CacheEntry2DnsCacheEntry(host, entry.getValue())); } return ret; } private static volatile Field expirationFieldOfInetAddress$CacheEntry = null; private static volatile Field addressesFieldOfInetAddress$CacheEntry = null; private static DnsCacheEntry inetAddress$CacheEntry2DnsCacheEntry(String host, Object entry) throws IllegalAccessException { if (expirationFieldOfInetAddress$CacheEntry == null || addressesFieldOfInetAddress$CacheEntry == null) { synchronized (InetAddressCacheUtilForJava8Minus.class) { if (expirationFieldOfInetAddress$CacheEntry == null) { // double check Class<?> cacheEntryClass = entry.getClass(); // InetAddress.CacheEntry has 2 filed: // - for jdk 6, address and expiration // - for jdk 7+, addresses(*renamed* from 6!) and expiration // code in jdk 6: // https://hg.openjdk.java.net/jdk6/jdk6/jdk/file/8deef18bb749/src/share/classes/java/net/InetAddress.java#l739 // code in jdk 7: // https://hg.openjdk.java.net/jdk7u/jdk7u/jdk/file/4dd5e486620d/src/share/classes/java/net/InetAddress.java#l742 // code in jdk 8: // https://hg.openjdk.java.net/jdk8u/jdk8u/jdk/file/45e4e636b757/src/share/classes/java/net/InetAddress.java#l748 final Field[] fields = cacheEntryClass.getDeclaredFields(); for (Field field : fields) { final String name = field.getName(); if (name.equals("expiration")) { field.setAccessible(true); expirationFieldOfInetAddress$CacheEntry = field; } else if (name.startsWith("address")) { // use startWith so works for jdk 6 and jdk 7+ field.setAccessible(true); addressesFieldOfInetAddress$CacheEntry = field; } else { throw new IllegalStateException("JDK add new Field " + name + " for class InetAddress.CacheEntry, report issue for dns-cache-manipulator lib!"); } } } } } long expiration = expirationFieldOfInetAddress$CacheEntry.getLong(entry); InetAddress[] addresses = (InetAddress[]) addressesFieldOfInetAddress$CacheEntry.get(entry); String[] ips = new String[addresses.length]; for (int i = 0; i < addresses.length; i++) { ips[i] = addresses[i].getHostAddress(); } return new DnsCacheEntry(host, ips, expiration); } public static void clearInetAddressCache() throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { synchronized (getAddressCacheOfInetAddress()) { getCache().clear(); getNegativeCache().clear(); } } private InetAddressCacheUtilForJava8Minus() { } }
library/src/main/java/com/alibaba/dcm/internal/InetAddressCacheUtilForJava8Minus.java
package com.alibaba.dcm.internal; import com.alibaba.dcm.DnsCache; import com.alibaba.dcm.DnsCacheEntry; import javax.annotation.Nullable; import javax.annotation.concurrent.GuardedBy; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import static com.alibaba.dcm.internal.InetAddressCacheUtilCommons.NEVER_EXPIRATION; import static com.alibaba.dcm.internal.InetAddressCacheUtilCommons.toInetAddressArray; /** * Util class to manipulate dns cache for {@code JDK 8-}. * <p> * dns cache is {@link InetAddress.Cache#cache} in {@link InetAddress#addressCache}. * <p> * <b>Caution</b>: <br> * Manipulation on {@link InetAddress#addressCache} <strong>MUST</strong> * be guarded by {@link InetAddress#addressCache} to avoid multithreading problem, * you can see the implementation of {@link InetAddress} to confirm this * (<b><i>See Also</i></b> lists key code of {@link InetAddress} related to this point). * * @author Jerry Lee (oldratlee at gmail dot com) * @see InetAddress * @see InetAddress#addressCache * @see InetAddress.CacheEntry * @see InetAddress#cacheInitIfNeeded() * @see InetAddress#cacheAddresses(String, InetAddress[], boolean) */ public final class InetAddressCacheUtilForJava8Minus { /** * Need convert host to lowercase, see {@link InetAddress#cacheAddresses(String, InetAddress[], boolean)}. */ public static void setInetAddressCache(String host, String[] ips, long expireMillis) throws UnknownHostException, IllegalAccessException, InstantiationException, InvocationTargetException, ClassNotFoundException, NoSuchFieldException { host = host.toLowerCase(); long expiration = expireMillis == NEVER_EXPIRATION ? 
NEVER_EXPIRATION : System.currentTimeMillis() + expireMillis; Object entry = newCacheEntry(host, ips, expiration); synchronized (getAddressCacheOfInetAddress()) { getCache().put(host, entry); getNegativeCache().remove(host); } } private static Object newCacheEntry(String host, String[] ips, long expiration) throws UnknownHostException, ClassNotFoundException, IllegalAccessException, InvocationTargetException, InstantiationException { // InetAddress.CacheEntry has only one constructor return getConstructorOfInetAddress$CacheEntry().newInstance(toInetAddressArray(host, ips), expiration); } /** * {@link InetAddress.CacheEntry#CacheEntry} */ private static volatile Constructor<?> constructorOfInetAddress$CacheEntry = null; private static Constructor<?> getConstructorOfInetAddress$CacheEntry() throws ClassNotFoundException { if (constructorOfInetAddress$CacheEntry != null) { return constructorOfInetAddress$CacheEntry; } synchronized (InetAddressCacheUtilCommons.class) { if (constructorOfInetAddress$CacheEntry != null) { // double check return constructorOfInetAddress$CacheEntry; } final String className = "java.net.InetAddress$CacheEntry"; final Class<?> clazz = Class.forName(className); // InetAddress.CacheEntry has only one constructor: // - for jdk 6, constructor signature is CacheEntry(Object address, long expiration) // - for jdk 7/8, constructor signature is CacheEntry(InetAddress[] addresses, long expiration) // // code in jdk 6: // https://hg.openjdk.java.net/jdk6/jdk6/jdk/file/8deef18bb749/src/share/classes/java/net/InetAddress.java#l739 // code in jdk 7: // https://hg.openjdk.java.net/jdk7u/jdk7u/jdk/file/4dd5e486620d/src/share/classes/java/net/InetAddress.java#l742 // code in jdk 8: // https://hg.openjdk.java.net/jdk8u/jdk8u/jdk/file/45e4e636b757/src/share/classes/java/net/InetAddress.java#l748 final Constructor<?> constructor = clazz.getDeclaredConstructors()[0]; constructor.setAccessible(true); constructorOfInetAddress$CacheEntry = constructor; return constructor; } } public static void removeInetAddressCache(String host) throws NoSuchFieldException, IllegalAccessException { host = host.toLowerCase(); synchronized (getAddressCacheOfInetAddress()) { getCache().remove(host); getNegativeCache().remove(host); } } /** * @return {@link InetAddress.Cache#cache} in {@link InetAddress#addressCache} */ @GuardedBy("getAddressCacheOfInetAddress()") private static Map<String, Object> getCache() throws NoSuchFieldException, IllegalAccessException { return getCacheOfInetAddress$Cache0(getAddressCacheOfInetAddress()); } /** * @return {@link InetAddress.Cache#cache} in {@link InetAddress#negativeCache} */ @GuardedBy("getAddressCacheOfInetAddress()") private static Map<String, Object> getNegativeCache() throws NoSuchFieldException, IllegalAccessException { return getCacheOfInetAddress$Cache0(getNegativeCacheOfInetAddress()); } @SuppressWarnings("unchecked") private static Map<String, Object> getCacheOfInetAddress$Cache0(Object inetAddressCache) throws NoSuchFieldException, IllegalAccessException { Class<?> clazz = inetAddressCache.getClass(); final Field cacheMapField = clazz.getDeclaredField("cache"); cacheMapField.setAccessible(true); return (Map<String, Object>) cacheMapField.get(inetAddressCache); } /** * @return {@link InetAddress#addressCache} */ private static Object getAddressCacheOfInetAddress() throws NoSuchFieldException, IllegalAccessException { return getAddressCacheAndNegativeCacheOfInetAddress0()[0]; } /** * @return {@link InetAddress#negativeCache} */ private static Object 
getNegativeCacheOfInetAddress() throws NoSuchFieldException, IllegalAccessException { return getAddressCacheAndNegativeCacheOfInetAddress0()[1]; } private static volatile Object[] ADDRESS_CACHE_AND_NEGATIVE_CACHE = null; /** * @return {@link InetAddress#addressCache} and {@link InetAddress#negativeCache} */ private static Object[] getAddressCacheAndNegativeCacheOfInetAddress0() throws NoSuchFieldException, IllegalAccessException { if (ADDRESS_CACHE_AND_NEGATIVE_CACHE == null) { synchronized (InetAddressCacheUtilForJava8Minus.class) { if (ADDRESS_CACHE_AND_NEGATIVE_CACHE == null) { // double check final Field cacheField = InetAddress.class.getDeclaredField("addressCache"); cacheField.setAccessible(true); final Field negativeCacheField = InetAddress.class.getDeclaredField("negativeCache"); negativeCacheField.setAccessible(true); ADDRESS_CACHE_AND_NEGATIVE_CACHE = new Object[]{ cacheField.get(InetAddress.class), negativeCacheField.get(InetAddress.class) }; } } } return ADDRESS_CACHE_AND_NEGATIVE_CACHE; } @Nullable public static DnsCacheEntry getInetAddressCache(String host) throws NoSuchFieldException, IllegalAccessException { host = host.toLowerCase(); final Object cacheEntry; synchronized (getAddressCacheOfInetAddress()) { cacheEntry = getCache().get(host); } if (null == cacheEntry) return null; final DnsCacheEntry dnsCacheEntry = inetAddress$CacheEntry2DnsCacheEntry(host, cacheEntry); if (isDnsCacheEntryExpired(dnsCacheEntry.getHost())) return null; return dnsCacheEntry; } private static boolean isDnsCacheEntryExpired(String host) { return null == host || "0.0.0.0".equals(host); } public static DnsCache listInetAddressCache() throws NoSuchFieldException, IllegalAccessException { final Map<String, Object> cache; final Map<String, Object> negativeCache; synchronized (getAddressCacheOfInetAddress()) { cache = new HashMap<String, Object>(getCache()); negativeCache = new HashMap<String, Object>(getNegativeCache()); } return new DnsCache(convert(cache), convert(negativeCache)); } private static List<DnsCacheEntry> convert(Map<String, Object> cache) throws IllegalAccessException { final List<DnsCacheEntry> ret = new ArrayList<DnsCacheEntry>(); for (Map.Entry<String, Object> entry : cache.entrySet()) { final String host = entry.getKey(); if (isDnsCacheEntryExpired(host)) { // exclude expired entries! continue; } ret.add(inetAddress$CacheEntry2DnsCacheEntry(host, entry.getValue())); } return ret; } private static volatile Field expirationFieldOfInetAddress$CacheEntry = null; private static volatile Field addressesFieldOfInetAddress$CacheEntry = null; private static DnsCacheEntry inetAddress$CacheEntry2DnsCacheEntry(String host, Object entry) throws IllegalAccessException { if (expirationFieldOfInetAddress$CacheEntry == null || addressesFieldOfInetAddress$CacheEntry == null) { synchronized (InetAddressCacheUtilForJava8Minus.class) { if (expirationFieldOfInetAddress$CacheEntry == null) { // double check Class<?> cacheEntryClass = entry.getClass(); // InetAddress.CacheEntry has 2 filed: // - for jdk 6, address and expiration // - for jdk 7+, addresses(*renamed* from 6!) 
and expiration // code in jdk 6: // https://hg.openjdk.java.net/jdk6/jdk6/jdk/file/8deef18bb749/src/share/classes/java/net/InetAddress.java#l739 // code in jdk 7: // https://hg.openjdk.java.net/jdk7u/jdk7u/jdk/file/4dd5e486620d/src/share/classes/java/net/InetAddress.java#l742 // code in jdk 8: // https://hg.openjdk.java.net/jdk8u/jdk8u/jdk/file/45e4e636b757/src/share/classes/java/net/InetAddress.java#l748 final Field[] fields = cacheEntryClass.getDeclaredFields(); for (Field field : fields) { final String name = field.getName(); if (name.equals("expiration")) { field.setAccessible(true); expirationFieldOfInetAddress$CacheEntry = field; } else if (name.startsWith("address")) { // use startWith so works for jdk 6 and jdk 7+ field.setAccessible(true); addressesFieldOfInetAddress$CacheEntry = field; } else { throw new IllegalStateException("JDK add new Field " + name + " for class InetAddress.CacheEntry, report issue for dns-cache-manipulator lib!"); } } } } } long expiration = expirationFieldOfInetAddress$CacheEntry.getLong(entry); InetAddress[] addresses = (InetAddress[]) addressesFieldOfInetAddress$CacheEntry.get(entry); String[] ips = new String[addresses.length]; for (int i = 0; i < addresses.length; i++) { ips[i] = addresses[i].getHostAddress(); } return new DnsCacheEntry(host, ips, expiration); } public static void clearInetAddressCache() throws NoSuchFieldException, IllegalAccessException { synchronized (getAddressCacheOfInetAddress()) { getCache().clear(); getNegativeCache().clear(); } } private InetAddressCacheUtilForJava8Minus() { } }
! performance improvement: cache reflection objects (field/constructor); cache the `InetAddress.Cache.cache` field
library/src/main/java/com/alibaba/dcm/internal/InetAddressCacheUtilForJava8Minus.java
! performance improvement: cache reflection objects (field/constructor)
<ide><path>ibrary/src/main/java/com/alibaba/dcm/internal/InetAddressCacheUtilForJava8Minus.java <ide> } <ide> <ide> public static void removeInetAddressCache(String host) <del> throws NoSuchFieldException, IllegalAccessException { <add> throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { <ide> host = host.toLowerCase(); <ide> <ide> synchronized (getAddressCacheOfInetAddress()) { <ide> */ <ide> @GuardedBy("getAddressCacheOfInetAddress()") <ide> private static Map<String, Object> getCache() <del> throws NoSuchFieldException, IllegalAccessException { <add> throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { <ide> return getCacheOfInetAddress$Cache0(getAddressCacheOfInetAddress()); <ide> } <ide> <ide> */ <ide> @GuardedBy("getAddressCacheOfInetAddress()") <ide> private static Map<String, Object> getNegativeCache() <del> throws NoSuchFieldException, IllegalAccessException { <add> throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { <ide> return getCacheOfInetAddress$Cache0(getNegativeCacheOfInetAddress()); <ide> } <add> <add> <add> /** <add> * {@link InetAddress.Cache.cache} <add> */ <add> private static volatile Field cacheMapFieldOfInetAddress$Cache = null; <ide> <ide> @SuppressWarnings("unchecked") <ide> private static Map<String, Object> getCacheOfInetAddress$Cache0(Object inetAddressCache) <del> throws NoSuchFieldException, IllegalAccessException { <del> Class<?> clazz = inetAddressCache.getClass(); <del> <del> final Field cacheMapField = clazz.getDeclaredField("cache"); <del> cacheMapField.setAccessible(true); <del> return (Map<String, Object>) cacheMapField.get(inetAddressCache); <add> throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { <add> if (cacheMapFieldOfInetAddress$Cache == null) { <add> synchronized (InetAddressCacheUtilForJava8Minus.class) { <add> if (cacheMapFieldOfInetAddress$Cache == null) { // double check <add> final Class<?> clazz = Class.forName("java.net.InetAddress$Cache"); <add> final Field f = clazz.getDeclaredField("cache"); <add> f.setAccessible(true); <add> cacheMapFieldOfInetAddress$Cache = f; <add> } <add> } <add> } <add> <add> return (Map<String, Object>) cacheMapFieldOfInetAddress$Cache.get(inetAddressCache); <ide> } <ide> <ide> /** <ide> <ide> @Nullable <ide> public static DnsCacheEntry getInetAddressCache(String host) <del> throws NoSuchFieldException, IllegalAccessException { <add> throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { <ide> host = host.toLowerCase(); <ide> <ide> final Object cacheEntry; <ide> } <ide> <ide> public static DnsCache listInetAddressCache() <del> throws NoSuchFieldException, IllegalAccessException { <add> throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { <ide> final Map<String, Object> cache; <ide> final Map<String, Object> negativeCache; <ide> synchronized (getAddressCacheOfInetAddress()) { <ide> return new DnsCacheEntry(host, ips, expiration); <ide> } <ide> <del> public static void clearInetAddressCache() throws NoSuchFieldException, IllegalAccessException { <add> public static void clearInetAddressCache() throws NoSuchFieldException, IllegalAccessException, ClassNotFoundException { <ide> synchronized (getAddressCacheOfInetAddress()) { <ide> getCache().clear(); <ide> getNegativeCache().clear();
JavaScript
isc
2d925010e0b43f7567af287482b04ef4f3db444d
0
schutm/nyc,istanbuljs/nyc,bcoe/nyc,isaacs/nyc
/* global describe, it */ var _ = require('lodash') var path = require('path') var bin = path.resolve(__dirname, '../bin/nyc') var fixturesCLI = path.resolve(__dirname, './fixtures/cli') var fakebin = path.resolve(fixturesCLI, 'fakebin') var fixturesHooks = path.resolve(__dirname, './fixtures/hooks') var fs = require('fs') var glob = require('glob') var isWindows = require('is-windows')() var rimraf = require('rimraf') var spawn = require('child_process').spawn require('chai').should() // beforeEach rimraf.sync(path.resolve(fakebin, 'node')) rimraf.sync(path.resolve(fakebin, 'npm')) rimraf.sync(path.resolve(fixturesCLI, 'subdir', 'output-dir')) describe('the nyc cli', function () { var env = { PATH: process.env.PATH } describe('--include', function () { it('can be used to limit bin to instrumenting specific files', function (done) { var args = [bin, '--all', '--include', 'half-covered.js', process.execPath, './half-covered.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/half-covered\.js/) stdout.should.not.match(/half-covered-failing\.js/) stdout.should.not.match(/test\.js/) done() }) }) }) describe('--exclude', function () { it('should allow default exclude rules to be overridden', function (done) { var args = [bin, '--all', '--exclude', '**/half-covered.js', process.execPath, './half-covered.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.not.match(/half-covered\.js/) stdout.should.match(/test\.js/) done() }) }) }) describe('--check-coverage', function () { it('fails when the expected coverage is below a threshold', function (done) { var args = [bin, '--check-coverage', '--lines', '51', process.execPath, './half-covered.js'] var message = 'ERROR: Coverage for lines (50%) does not meet global threshold (51%)' var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stderr = '' proc.stderr.on('data', function (chunk) { stderr += chunk }) proc.on('close', function (code) { code.should.not.equal(0) stderr.trim().should.equal(message) done() }) }) // https://github.com/istanbuljs/nyc/issues/384 it('fails when check-coverage command is used rather than flag', function (done) { var args = [bin, 'check-coverage', '--lines', '51', process.execPath, './half-covered.js'] var message = 'ERROR: Coverage for lines (50%) does not meet global threshold (51%)' var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stderr = '' proc.stderr.on('data', function (chunk) { stderr += chunk }) proc.on('close', function (code) { code.should.not.equal(0) stderr.trim().should.equal(message) done() }) }) it('succeeds when the expected coverage is above a threshold', function (done) { var args = [bin, '--check-coverage', '--lines', '49', process.execPath, './half-covered.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('close', function (code) { code.should.equal(0) done() }) }) // https://github.com/bcoe/nyc/issues/209 it('fails in any case when the underlying test failed', function (done) { var args = [bin, '--check-coverage', '--lines', '49', process.execPath, './half-covered-failing.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('close', 
function (code) { code.should.not.equal(0) done() }) }) it('fails when the expected file coverage is below a threshold', function (done) { var args = [bin, '--check-coverage', '--lines', '51', '--per-file', process.execPath, './half-covered.js'] var matcher = RegExp('ERROR: Coverage for lines \\(50%\\) does not meet threshold \\(51%\\) for .+half-covered.js') var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stderr = '' proc.stderr.on('data', function (chunk) { stderr += chunk }) proc.on('close', function (code) { code.should.not.equal(0) stderr.trim().should.match(matcher) done() }) }) }) // https://github.com/bcoe/nyc/issues/190 describe('running "npm test"', function () { it('can run "npm test" which directly invokes a test file', function (done) { var args = [bin, 'npm', 'test'] var directory = path.resolve(fixturesCLI, 'run-npm-test') var proc = spawn(process.execPath, args, { cwd: directory, env: env }) proc.on('close', function (code) { code.should.equal(0) done() }) }) it('can run "npm test" which indirectly invokes a test file', function (done) { var args = [bin, 'npm', 'test'] var directory = path.resolve(fixturesCLI, 'run-npm-test-recursive') var proc = spawn(process.execPath, args, { cwd: directory, env: env }) proc.on('close', function (code) { code.should.equal(0) done() }) }) function writeFakeNPM (shebang) { var targetPath = path.resolve(fakebin, 'npm') var source = fs.readFileSync(path.resolve(fakebin, 'npm-template.js')) fs.writeFileSync(targetPath, '#!' + shebang + '\n' + source) fs.chmodSync(targetPath, 493) // 0o755 } it('can run "npm test", absolute shebang edition', function (done) { if (isWindows) return done() writeFakeNPM(process.execPath) var args = [bin, 'npm', 'test'] var directory = path.resolve(fixturesCLI, 'run-npm-test-recursive') var proc = spawn(process.execPath, args, { cwd: directory, env: { PATH: fakebin + ':' + env.PATH } }) proc.on('close', function (code) { code.should.equal(0) done() }) }) it('can run "npm test", weird bash+dirname shebang edition', function (done) { if (isWindows) return done() // This string is taken verbatim from tools/install.py in Node core v5.x writeFakeNPM('/bin/sh\n// 2>/dev/null; exec "`dirname "$0"`/node" "$0" "$@"') fs.symlinkSync(process.execPath, path.resolve(fakebin, 'node')) var args = [bin, 'npm', 'test'] var directory = path.resolve(fixturesCLI, 'run-npm-test-recursive') var proc = spawn(process.execPath, args, { cwd: directory, env: { PATH: fakebin + ':' + env.PATH } }) proc.on('close', function (code) { code.should.equal(0) done() }) }) }) describe('configuration', function () { it('passes configuration via environment variables', function (done) { var args = [ bin, '--silent', '--require=mkdirp', '--include=env.js', '--exclude=batman.js', '--extension=.js', '--cache=true', '--source-map=true', process.execPath, './env.js' ] var expected = { instrumenter: './lib/instrumenters/istanbul', silent: true, cache: true, sourceMap: true } var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) var env = JSON.parse(stdout) var config = JSON.parse(env.NYC_CONFIG, null, 2) config.should.include(expected) config.include.should.include('env.js') config.exclude.should.include('batman.js') config.extension.should.include('.js') done() }) }) it('allows package.json configuration to be overridden with command line args', function (done) { var args = [bin, 
'--reporter=text-lcov', process.execPath, './half-covered.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/SF:.*half-covered\.js/) done() }) }) describe('.nycrc', function () { var cwd = path.resolve(fixturesCLI, './nycrc') it('loads configuration from package.json and .nycrc', function (done) { var args = [bin, process.execPath, './index.js'] var proc = spawn(process.execPath, args, { cwd: cwd, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/SF:.*index\.js/) stdout.should.not.match(/SF:.*ignore\.js/) done() }) }) it('allows .nycrc configuration to be overridden with command line args', function (done) { var args = [bin, '--exclude=foo.js', process.execPath, './index.js'] var proc = spawn(process.execPath, args, { cwd: cwd, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/SF:.*index\.js/) stdout.should.match(/SF:.*ignore\.js/) done() }) }) }) }) describe('coverage', function () { if (parseInt(process.versions.node.split('.')[0]) < 4) return it('reports appropriate coverage information for es6 source files', function (done) { var args = [bin, '--reporter=lcov', '--reporter=text', process.execPath, './es6.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) // we should miss covering the appropriate lines. 
stdout.should.match(/11,16,17/) done() }) }) }) describe('instrument', function () { describe('no output folder', function () { it('allows a single file to be instrumented', function (done) { var args = [bin, 'instrument', './half-covered.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/path:"\.\/half-covered\.js"/) done() }) }) it('allows a directory of files to be instrumented', function (done) { var args = [bin, 'instrument', './'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/half-covered\.js"/) stdout.should.match(/half-covered-failing\.js"/) stdout.should.not.match(/spawn\.js"/) done() }) }) it('works in directories without a package.json', function (done) { var args = [bin, 'instrument', './input-dir', './output-dir'] var subdir = path.resolve(fixturesCLI, 'subdir') var proc = spawn(process.execPath, args, { cwd: subdir, env: env }) proc.on('exit', function (code) { code.should.equal(0) var target = path.resolve(subdir, 'output-dir', 'index.js') fs.readFileSync(target, 'utf8') .should.match(/console.log\('Hello, World!'\)/) done() }) }) }) describe('output folder specified', function () { it('allows a single file to be instrumented', function (done) { var args = [bin, 'instrument', './half-covered.js', './output'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('close', function (code) { code.should.equal(0) var files = fs.readdirSync(path.resolve(fixturesCLI, './output')) files.length.should.equal(1) files.should.include('half-covered.js') rimraf.sync(path.resolve(fixturesCLI, 'output')) done() }) }) it('allows a directory of files to be instrumented', function (done) { var args = [bin, 'instrument', './', './output'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('close', function (code) { code.should.equal(0) var files = fs.readdirSync(path.resolve(fixturesCLI, './output')) files.should.include('env.js') files.should.include('es6.js') rimraf.sync(path.resolve(fixturesCLI, 'output')) done() }) }) }) }) describe('hooks', function () { it('provides coverage for requireJS and AMD modules', function (done) { var args = [bin, process.execPath, './index.js'] var proc = spawn(process.execPath, args, { cwd: fixturesHooks, env: process.env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/ipsum\.js/) stdout.should.match(/lorem\.js/) done() }) }) }) describe('args', function () { it('does not interpret args intended for instrumented bin', function (done) { var args = [bin, '--silent', process.execPath, 'args.js', '--help', '--version'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) var args = JSON.parse(stdout) args.should.include('--help') args.should.include('--version') args.should.not.include('--silent') done() }) }) it('interprets first args after -- as Node.js execArgv', function (done) { var args = [bin, '--', '--expose-gc', path.resolve(fixturesCLI, 'gc.js')] var proc = spawn(process.execPath, args, 
{ cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.setEncoding('utf8') proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.include('still running') done() }) }) }) describe('--show-process-tree', function () { it('displays a tree of spawned processes', function (done) { var args = [bin, '--show-process-tree', process.execPath, 'selfspawn-fibonacci.js', '5'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.setEncoding('utf8') proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(new RegExp( 'nyc\n' + '└─┬.*selfspawn-fibonacci.js 5\n' + ' │.* % Lines\n' + ' ├─┬.*selfspawn-fibonacci.js 4\n' + ' │ │.* % Lines\n' + ' │ ├─┬.*selfspawn-fibonacci.js 3\n' + ' │ │ │.* % Lines\n' + ' │ │ ├──.*selfspawn-fibonacci.js 2\n' + ' │ │ │.* % Lines\n' + ' │ │ └──.*selfspawn-fibonacci.js 1\n' + ' │ │ .* % Lines\n' + ' │ └──.*selfspawn-fibonacci.js 2\n' + ' │ .* % Lines\n' + ' └─┬.*selfspawn-fibonacci.js 3\n' + ' │.* % Lines\n' + ' ├──.*selfspawn-fibonacci.js 2\n' + ' │.* % Lines\n' + ' └──.*selfspawn-fibonacci.js 1\n' + ' .* % Lines\n' )) done() }) }) it('doesn’t create the temp directory for process info files when not present', function (done) { var args = [bin, process.execPath, 'selfspawn-fibonacci.js', '5'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('exit', function (code) { code.should.equal(0) fs.stat(path.resolve(fixturesCLI, '.nyc_output', 'processinfo'), function (err, stat) { err.code.should.equal('ENOENT') done() }) }) }) }) describe('noop instrumenter', function () { it('setting instrument to "false" configures noop instrumenter', function (done) { var args = [ bin, '--silent', '--no-instrument', '--no-source-map', process.execPath, './env.js' ] var expected = { silent: true, instrument: false, sourceMap: false, instrumenter: './lib/instrumenters/noop' } var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) var env = JSON.parse(stdout) var config = JSON.parse(env.NYC_CONFIG, null, 2) config.should.include(expected) done() }) }) describe('--all', function () { it('extracts coverage headers from unexecuted files', function (done) { var nycOutput = path.resolve(fixturesCLI, '.nyc_output') rimraf.sync(nycOutput) var args = [ bin, '--all', '--silent', '--no-instrument', '--no-source-map', process.execPath, // any file other than external-instrument.js, which we // want to ensure has its header loaded. './env.js' ] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('close', function (code) { code.should.equal(0) glob(nycOutput + '/*.json', function (_err, files) { // we should have extracted the coverage header from external-instrumenter.js. var coverage = {} files.forEach(function (file) { _.assign(coverage, JSON.parse( fs.readFileSync(file, 'utf-8') )) }) Object.keys(coverage).should.include('./external-instrumenter.js') // we should not have executed file, so all counts sould be 0. 
var sum = 0 ;Object.keys(coverage['./external-instrumenter.js'].s).forEach(function (k) { sum += coverage['./external-instrumenter.js'].s[k] }) sum.should.equal(0) return done() }) }) }) }) }) it('allows an alternative cache folder to be specified', function (done) { var args = [bin, '--cache-dir=./foo-cache', '--cache=true', process.execPath, './half-covered.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('close', function (code) { code.should.equal(0) // we should have created ./foo-cache rather // than the default ./node_modules/.cache. fs.readdirSync(path.resolve( fixturesCLI, './foo-cache' )).length.should.equal(1) rimraf.sync(path.resolve(fixturesCLI, 'foo-cache')) done() }) }) // see: https://github.com/istanbuljs/nyc/issues/563 it('does not create .cache folder if cache is "false"', function (done) { var args = [bin, '--cache=false', process.execPath, './index.js'] var proc = spawn(process.execPath, args, { cwd: process.cwd(), env: env }) rimraf.sync('./node_modules/.cache') proc.on('close', function (code) { code.should.equal(0) fs.existsSync('./node_modules/.cache').should.equal(false) done() }) }) it('allows alternative high and low watermarks to be configured', function (done) { var args = [ bin, '--watermarks.lines=90', '--watermarks.lines=100', '--watermarks.statements=30', '--watermarks.statements=40', '--cache=true', process.execPath, './half-covered.js' ] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: { PATH: process.env.PATH, FORCE_COLOR: true } }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) // 50% line coverage is below our low watermark (so it's red). stdout.should.match(/\[31;1m\W+50\W+/) // 50% statement coverage is above our high-watermark (so it's green). stdout.should.match(/\[32;1m\W+50\W+/) done() }) }) })
test/nyc-bin.js
/* global describe, it */ var _ = require('lodash') var path = require('path') var bin = path.resolve(__dirname, '../bin/nyc') var fixturesCLI = path.resolve(__dirname, './fixtures/cli') var fakebin = path.resolve(fixturesCLI, 'fakebin') var fixturesHooks = path.resolve(__dirname, './fixtures/hooks') var fs = require('fs') var glob = require('glob') var isWindows = require('is-windows')() var rimraf = require('rimraf') var spawn = require('child_process').spawn require('chai').should() // beforeEach rimraf.sync(path.resolve(fakebin, 'node')) rimraf.sync(path.resolve(fakebin, 'npm')) rimraf.sync(path.resolve(fixturesCLI, 'subdir', 'output-dir')) describe('the nyc cli', function () { var env = { PATH: process.env.PATH } describe('--include', function () { it('can be used to limit bin to instrumenting specific files', function (done) { var args = [bin, '--all', '--include', 'half-covered.js', process.execPath, './half-covered.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/half-covered\.js/) stdout.should.not.match(/half-covered-failing\.js/) stdout.should.not.match(/test\.js/) done() }) }) }) describe('--exclude', function () { it('should allow default exclude rules to be overridden', function (done) { var args = [bin, '--all', '--exclude', '**/half-covered.js', process.execPath, './half-covered.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.not.match(/half-covered\.js/) stdout.should.match(/test\.js/) done() }) }) }) describe('--check-coverage', function () { it('fails when the expected coverage is below a threshold', function (done) { var args = [bin, '--check-coverage', '--lines', '51', process.execPath, './half-covered.js'] var message = 'ERROR: Coverage for lines (50%) does not meet global threshold (51%)' var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stderr = '' proc.stderr.on('data', function (chunk) { stderr += chunk }) proc.on('close', function (code) { code.should.not.equal(0) stderr.trim().should.equal(message) done() }) }) // https://github.com/istanbuljs/nyc/issues/384 it('fails when check-coverage command is used rather than flag', function (done) { var args = [bin, 'check-coverage', '--lines', '51', process.execPath, './half-covered.js'] var message = 'ERROR: Coverage for lines (50%) does not meet global threshold (51%)' var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stderr = '' proc.stderr.on('data', function (chunk) { stderr += chunk }) proc.on('close', function (code) { code.should.not.equal(0) stderr.trim().should.equal(message) done() }) }) it('succeeds when the expected coverage is above a threshold', function (done) { var args = [bin, '--check-coverage', '--lines', '49', process.execPath, './half-covered.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('close', function (code) { code.should.equal(0) done() }) }) // https://github.com/bcoe/nyc/issues/209 it('fails in any case when the underlying test failed', function (done) { var args = [bin, '--check-coverage', '--lines', '49', process.execPath, './half-covered-failing.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('close', 
function (code) { code.should.not.equal(0) done() }) }) it('fails when the expected file coverage is below a threshold', function (done) { var args = [bin, '--check-coverage', '--lines', '51', '--per-file', process.execPath, './half-covered.js'] var matcher = RegExp('ERROR: Coverage for lines \\(50%\\) does not meet threshold \\(51%\\) for .+/half-covered.js') var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stderr = '' proc.stderr.on('data', function (chunk) { stderr += chunk }) proc.on('close', function (code) { code.should.not.equal(0) stderr.trim().should.match(matcher) done() }) }) }) // https://github.com/bcoe/nyc/issues/190 describe('running "npm test"', function () { it('can run "npm test" which directly invokes a test file', function (done) { var args = [bin, 'npm', 'test'] var directory = path.resolve(fixturesCLI, 'run-npm-test') var proc = spawn(process.execPath, args, { cwd: directory, env: env }) proc.on('close', function (code) { code.should.equal(0) done() }) }) it('can run "npm test" which indirectly invokes a test file', function (done) { var args = [bin, 'npm', 'test'] var directory = path.resolve(fixturesCLI, 'run-npm-test-recursive') var proc = spawn(process.execPath, args, { cwd: directory, env: env }) proc.on('close', function (code) { code.should.equal(0) done() }) }) function writeFakeNPM (shebang) { var targetPath = path.resolve(fakebin, 'npm') var source = fs.readFileSync(path.resolve(fakebin, 'npm-template.js')) fs.writeFileSync(targetPath, '#!' + shebang + '\n' + source) fs.chmodSync(targetPath, 493) // 0o755 } it('can run "npm test", absolute shebang edition', function (done) { if (isWindows) return done() writeFakeNPM(process.execPath) var args = [bin, 'npm', 'test'] var directory = path.resolve(fixturesCLI, 'run-npm-test-recursive') var proc = spawn(process.execPath, args, { cwd: directory, env: { PATH: fakebin + ':' + env.PATH } }) proc.on('close', function (code) { code.should.equal(0) done() }) }) it('can run "npm test", weird bash+dirname shebang edition', function (done) { if (isWindows) return done() // This string is taken verbatim from tools/install.py in Node core v5.x writeFakeNPM('/bin/sh\n// 2>/dev/null; exec "`dirname "$0"`/node" "$0" "$@"') fs.symlinkSync(process.execPath, path.resolve(fakebin, 'node')) var args = [bin, 'npm', 'test'] var directory = path.resolve(fixturesCLI, 'run-npm-test-recursive') var proc = spawn(process.execPath, args, { cwd: directory, env: { PATH: fakebin + ':' + env.PATH } }) proc.on('close', function (code) { code.should.equal(0) done() }) }) }) describe('configuration', function () { it('passes configuration via environment variables', function (done) { var args = [ bin, '--silent', '--require=mkdirp', '--include=env.js', '--exclude=batman.js', '--extension=.js', '--cache=true', '--source-map=true', process.execPath, './env.js' ] var expected = { instrumenter: './lib/instrumenters/istanbul', silent: true, cache: true, sourceMap: true } var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) var env = JSON.parse(stdout) var config = JSON.parse(env.NYC_CONFIG, null, 2) config.should.include(expected) config.include.should.include('env.js') config.exclude.should.include('batman.js') config.extension.should.include('.js') done() }) }) it('allows package.json configuration to be overridden with command line args', function (done) { var args = [bin, 
'--reporter=text-lcov', process.execPath, './half-covered.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/SF:.*half-covered\.js/) done() }) }) describe('.nycrc', function () { var cwd = path.resolve(fixturesCLI, './nycrc') it('loads configuration from package.json and .nycrc', function (done) { var args = [bin, process.execPath, './index.js'] var proc = spawn(process.execPath, args, { cwd: cwd, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/SF:.*index\.js/) stdout.should.not.match(/SF:.*ignore\.js/) done() }) }) it('allows .nycrc configuration to be overridden with command line args', function (done) { var args = [bin, '--exclude=foo.js', process.execPath, './index.js'] var proc = spawn(process.execPath, args, { cwd: cwd, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/SF:.*index\.js/) stdout.should.match(/SF:.*ignore\.js/) done() }) }) }) }) describe('coverage', function () { if (parseInt(process.versions.node.split('.')[0]) < 4) return it('reports appropriate coverage information for es6 source files', function (done) { var args = [bin, '--reporter=lcov', '--reporter=text', process.execPath, './es6.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) // we should miss covering the appropriate lines. 
stdout.should.match(/11,16,17/) done() }) }) }) describe('instrument', function () { describe('no output folder', function () { it('allows a single file to be instrumented', function (done) { var args = [bin, 'instrument', './half-covered.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/path:"\.\/half-covered\.js"/) done() }) }) it('allows a directory of files to be instrumented', function (done) { var args = [bin, 'instrument', './'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/half-covered\.js"/) stdout.should.match(/half-covered-failing\.js"/) stdout.should.not.match(/spawn\.js"/) done() }) }) it('works in directories without a package.json', function (done) { var args = [bin, 'instrument', './input-dir', './output-dir'] var subdir = path.resolve(fixturesCLI, 'subdir') var proc = spawn(process.execPath, args, { cwd: subdir, env: env }) proc.on('exit', function (code) { code.should.equal(0) var target = path.resolve(subdir, 'output-dir', 'index.js') fs.readFileSync(target, 'utf8') .should.match(/console.log\('Hello, World!'\)/) done() }) }) }) describe('output folder specified', function () { it('allows a single file to be instrumented', function (done) { var args = [bin, 'instrument', './half-covered.js', './output'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('close', function (code) { code.should.equal(0) var files = fs.readdirSync(path.resolve(fixturesCLI, './output')) files.length.should.equal(1) files.should.include('half-covered.js') rimraf.sync(path.resolve(fixturesCLI, 'output')) done() }) }) it('allows a directory of files to be instrumented', function (done) { var args = [bin, 'instrument', './', './output'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('close', function (code) { code.should.equal(0) var files = fs.readdirSync(path.resolve(fixturesCLI, './output')) files.should.include('env.js') files.should.include('es6.js') rimraf.sync(path.resolve(fixturesCLI, 'output')) done() }) }) }) }) describe('hooks', function () { it('provides coverage for requireJS and AMD modules', function (done) { var args = [bin, process.execPath, './index.js'] var proc = spawn(process.execPath, args, { cwd: fixturesHooks, env: process.env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(/ipsum\.js/) stdout.should.match(/lorem\.js/) done() }) }) }) describe('args', function () { it('does not interpret args intended for instrumented bin', function (done) { var args = [bin, '--silent', process.execPath, 'args.js', '--help', '--version'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) var args = JSON.parse(stdout) args.should.include('--help') args.should.include('--version') args.should.not.include('--silent') done() }) }) it('interprets first args after -- as Node.js execArgv', function (done) { var args = [bin, '--', '--expose-gc', path.resolve(fixturesCLI, 'gc.js')] var proc = spawn(process.execPath, args, 
{ cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.setEncoding('utf8') proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.include('still running') done() }) }) }) describe('--show-process-tree', function () { it('displays a tree of spawned processes', function (done) { var args = [bin, '--show-process-tree', process.execPath, 'selfspawn-fibonacci.js', '5'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.setEncoding('utf8') proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) stdout.should.match(new RegExp( 'nyc\n' + '└─┬.*selfspawn-fibonacci.js 5\n' + ' │.* % Lines\n' + ' ├─┬.*selfspawn-fibonacci.js 4\n' + ' │ │.* % Lines\n' + ' │ ├─┬.*selfspawn-fibonacci.js 3\n' + ' │ │ │.* % Lines\n' + ' │ │ ├──.*selfspawn-fibonacci.js 2\n' + ' │ │ │.* % Lines\n' + ' │ │ └──.*selfspawn-fibonacci.js 1\n' + ' │ │ .* % Lines\n' + ' │ └──.*selfspawn-fibonacci.js 2\n' + ' │ .* % Lines\n' + ' └─┬.*selfspawn-fibonacci.js 3\n' + ' │.* % Lines\n' + ' ├──.*selfspawn-fibonacci.js 2\n' + ' │.* % Lines\n' + ' └──.*selfspawn-fibonacci.js 1\n' + ' .* % Lines\n' )) done() }) }) it('doesn’t create the temp directory for process info files when not present', function (done) { var args = [bin, process.execPath, 'selfspawn-fibonacci.js', '5'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('exit', function (code) { code.should.equal(0) fs.stat(path.resolve(fixturesCLI, '.nyc_output', 'processinfo'), function (err, stat) { err.code.should.equal('ENOENT') done() }) }) }) }) describe('noop instrumenter', function () { it('setting instrument to "false" configures noop instrumenter', function (done) { var args = [ bin, '--silent', '--no-instrument', '--no-source-map', process.execPath, './env.js' ] var expected = { silent: true, instrument: false, sourceMap: false, instrumenter: './lib/instrumenters/noop' } var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) var env = JSON.parse(stdout) var config = JSON.parse(env.NYC_CONFIG, null, 2) config.should.include(expected) done() }) }) describe('--all', function () { it('extracts coverage headers from unexecuted files', function (done) { var nycOutput = path.resolve(fixturesCLI, '.nyc_output') rimraf.sync(nycOutput) var args = [ bin, '--all', '--silent', '--no-instrument', '--no-source-map', process.execPath, // any file other than external-instrument.js, which we // want to ensure has its header loaded. './env.js' ] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('close', function (code) { code.should.equal(0) glob(nycOutput + '/*.json', function (_err, files) { // we should have extracted the coverage header from external-instrumenter.js. var coverage = {} files.forEach(function (file) { _.assign(coverage, JSON.parse( fs.readFileSync(file, 'utf-8') )) }) Object.keys(coverage).should.include('./external-instrumenter.js') // we should not have executed file, so all counts sould be 0. 
var sum = 0 ;Object.keys(coverage['./external-instrumenter.js'].s).forEach(function (k) { sum += coverage['./external-instrumenter.js'].s[k] }) sum.should.equal(0) return done() }) }) }) }) }) it('allows an alternative cache folder to be specified', function (done) { var args = [bin, '--cache-dir=./foo-cache', '--cache=true', process.execPath, './half-covered.js'] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: env }) proc.on('close', function (code) { code.should.equal(0) // we should have created ./foo-cache rather // than the default ./node_modules/.cache. fs.readdirSync(path.resolve( fixturesCLI, './foo-cache' )).length.should.equal(1) rimraf.sync(path.resolve(fixturesCLI, 'foo-cache')) done() }) }) // see: https://github.com/istanbuljs/nyc/issues/563 it('does not create .cache folder if cache is "false"', function (done) { var args = [bin, '--cache=false', process.execPath, './index.js'] var proc = spawn(process.execPath, args, { cwd: process.cwd(), env: env }) rimraf.sync('./node_modules/.cache') proc.on('close', function (code) { code.should.equal(0) fs.existsSync('./node_modules/.cache').should.equal(false) done() }) }) it('allows alternative high and low watermarks to be configured', function (done) { var args = [ bin, '--watermarks.lines=90', '--watermarks.lines=100', '--watermarks.statements=30', '--watermarks.statements=40', '--cache=true', process.execPath, './half-covered.js' ] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: { PATH: process.env.PATH, FORCE_COLOR: true } }) var stdout = '' proc.stdout.on('data', function (chunk) { stdout += chunk }) proc.on('close', function (code) { code.should.equal(0) // 50% line coverage is below our low watermark (so it's red). stdout.should.match(/\[31;1m\W+50\W+/) // 50% statement coverage is above our high-watermark (so it's green). stdout.should.match(/\[32;1m\W+50\W+/) done() }) }) })
chore: attempt to fix appveyor (#623)
test/nyc-bin.js
chore: attempt to fix appveyor (#623)
<ide><path>test/nyc-bin.js <ide> <ide> it('fails when the expected file coverage is below a threshold', function (done) { <ide> var args = [bin, '--check-coverage', '--lines', '51', '--per-file', process.execPath, './half-covered.js'] <del> var matcher = RegExp('ERROR: Coverage for lines \\(50%\\) does not meet threshold \\(51%\\) for .+/half-covered.js') <add> var matcher = RegExp('ERROR: Coverage for lines \\(50%\\) does not meet threshold \\(51%\\) for .+half-covered.js') <ide> <ide> var proc = spawn(process.execPath, args, { <ide> cwd: fixturesCLI,
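The diff above relaxes the expected per-file threshold error matcher so it no longer requires a forward slash immediately before the file name, presumably so the assertion also passes on AppVeyor's Windows workers, where paths use backslashes. Below is a minimal Java sketch of the same separator-agnostic matching idea; the class name and the sample paths are illustrative and are not taken from the nyc test suite.

import java.util.regex.Pattern;

// Match the per-file coverage error regardless of the platform's path
// separator: ".+half-covered\.js" accepts any prefix, slash or backslash.
public class SeparatorAgnosticMatch {
    private static final Pattern PER_FILE_ERROR = Pattern.compile(
        "ERROR: Coverage for lines \\(50%\\) does not meet threshold \\(51%\\) for .+half-covered\\.js");

    public static void main(String[] args) {
        String unixMessage = "ERROR: Coverage for lines (50%) does not meet threshold (51%) for /home/ci/cli/half-covered.js";
        String windowsMessage = "ERROR: Coverage for lines (50%) does not meet threshold (51%) for C:\\projects\\cli\\half-covered.js";
        System.out.println(PER_FILE_ERROR.matcher(unixMessage).find());    // true
        System.out.println(PER_FILE_ERROR.matcher(windowsMessage).find()); // true
    }
}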
JavaScript
mit
0aded781f3974d6013054cee0600b1152fe5b546
0
njfrost/skyview
const chalk = require('chalk') const figlet = require('figlet') const fetch = require('node-fetch') const dscovrMocks = require('../mocks/dscovr.mock.js') const fs = require('fs') const baseApi = 'https://epic.gsfc.nasa.gov/api' const baseArchive = 'https://epic.gsfc.nasa.gov/archive' function getJson() { return fetch(`${baseApi}/natural`).then(function(res) { return res.json() }) } function getImageUrl(formattedDate, name, requestColor, requestFormat) { const color = requestColor || 'natural' const format = requestFormat || 'png' return `${baseArchive}/${color}/${formattedDate}/${format}/${name}.${format}` } function getImage() { return getJson().then(function(json) { var date = json[0].date var image = json[0].image const formattedDate = date.slice(0,10).replace(new RegExp('-', 'g'), '/') const imageUrl = getImageUrl(formattedDate, image) console.log(`downloading ${imageUrl}`) return fetch(imageUrl) .then(function(res) { const filePath = `${process.cwd()}/${image}.png` console.log(chalk.yellow(figlet.textSync('Earth!', { horizontalLayout: 'full' }))) console.log(chalk.green(`Downloading ${imageUrl} ....`)) const writestream = fs.createWriteStream(filePath) console.log(chalk.green(`Saved file to ${filePath}`)) const stream = res.body.pipe(writestream) return new Promise(function (resolve, reject) { stream.on('close', resolve) stream.on('error', reject) }) }) }) } module.exports = { getJson, getImage, baseApi, baseArchive, }
dscovr/dscovr.js
const chalk = require('chalk') const figlet = require('figlet') const fetch = require('node-fetch') const dscovrMocks = require('../mocks/dscovr.mock.js') const fs = require('fs') const baseApi = 'https://epic.gsfc.nasa.gov/api' const baseArchive = 'https://epic.gsfc.nasa.gov/archive' function getJson() { return fetch(`${baseApi}/natural`).then(function(res) { return res.json() }) } function getImage() { return getJson().then(function(json) { var date = json[0].date var image = json[0].image const formattedDate = date.slice(0,10).replace(new RegExp('-', 'g'), '/') const imageUrl = `${baseArchive}/natural/${formattedDate}/png/${image}.png` console.log(`downloading ${imageUrl}`) return fetch(imageUrl) .then(function(res) { const filePath = `${process.cwd()}/${image}.png` console.log(chalk.yellow(figlet.textSync('Earth!', { horizontalLayout: 'full' }))) console.log(chalk.green(`Downloading ${imageUrl} ....`)) const writestream = fs.createWriteStream(filePath) console.log(chalk.green(`Saved file to ${filePath}`)) const stream = res.body.pipe(writestream) return new Promise(function (resolve, reject) { stream.on('close', resolve) stream.on('error', reject) }) }) }) } module.exports = { getJson, getImage, baseApi, baseArchive, }
extract get image url
dscovr/dscovr.js
extract get image url
<ide><path>dscovr/dscovr.js <ide> }) <ide> } <ide> <add>function getImageUrl(formattedDate, name, requestColor, requestFormat) { <add> const color = requestColor || 'natural' <add> const format = requestFormat || 'png' <add> return `${baseArchive}/${color}/${formattedDate}/${format}/${name}.${format}` <add>} <add> <ide> function getImage() { <ide> return getJson().then(function(json) { <ide> var date = json[0].date <ide> var image = json[0].image <ide> const formattedDate = date.slice(0,10).replace(new RegExp('-', 'g'), '/') <del> const imageUrl = `${baseArchive}/natural/${formattedDate}/png/${image}.png` <add> const imageUrl = getImageUrl(formattedDate, image) <ide> console.log(`downloading ${imageUrl}`) <ide> return fetch(imageUrl) <ide> .then(function(res) { <ide> stream.on('close', resolve) <ide> stream.on('error', reject) <ide> }) <del> <del> <ide> }) <ide> }) <ide> }
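The change above pulls URL construction out of getImage() into a getImageUrl() helper whose color and format arguments fall back to 'natural' and 'png'. A rough Java equivalent of that refactoring is sketched below; the class name, sample date, and sample image name are made up for illustration, while the base URL and defaults mirror the JavaScript above.

// URL construction extracted into a helper with defaulted color/format,
// mirroring the "requestColor || 'natural'" pattern via an overload.
public class EpicImageUrl {
    private static final String BASE_ARCHIVE = "https://epic.gsfc.nasa.gov/archive";

    static String getImageUrl(String formattedDate, String name) {
        return getImageUrl(formattedDate, name, "natural", "png");
    }

    static String getImageUrl(String formattedDate, String name, String color, String format) {
        return BASE_ARCHIVE + "/" + color + "/" + formattedDate + "/" + format + "/" + name + "." + format;
    }

    public static void main(String[] args) {
        // Date formatted as yyyy/MM/dd, image name as returned by the EPIC API (sample values).
        System.out.println(getImageUrl("2017/06/01", "epic_1b_20170601003633"));
    }
}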
Java
mit
33c790a8c8f6dae06cd3793ae4a58a2b7dc6c109
0
opensciencegrid/rsvprocess,opensciencegrid/rsvprocess
package rsv.process.control; import java.io.BufferedWriter; import java.io.FileWriter; import java.sql.SQLException; import java.util.ArrayList; import java.util.Scanner; import java.util.TreeMap; import java.util.TreeSet; import org.apache.log4j.Logger; import rsv.process.Configuration; import rsv.process.model.MetricDataModel; import rsv.process.model.OIMModel; import rsv.process.model.record.MetricData; import rsv.process.model.record.Resource; import rsv.process.model.record.VirtualOrganization; public class RSVVOMatrix implements RSVProcess{ private static final Logger logger = Logger.getLogger(RSVVOMatrix.class); //some psudo-static configs private static final Integer vosupported_metric_id = 19; private static final String vodetail_token = "# List of VOs this site claims to support "; public int run(String args[]) { int ret = RSVMain.exitcode_ok; try { StringBuffer xml = new StringBuffer(); xml.append("<?xml version=\"1.0\"?>\n"); xml.append("<VOMembership>\n"); MetricDataModel mdm = new MetricDataModel(); OIMModel oim = new OIMModel(); TreeMap<Integer/*vo_id*/, TreeSet<Integer/*resource_id*/>> void2resources = new TreeMap<Integer, TreeSet<Integer>>(); //grouped by Resource ID xml.append("<ResourceGrouped>"); OIMModel.ResourcesType resources = oim.getResources(); for(Integer resource_id : resources.keySet()) { StringBuffer errors = new StringBuffer(); String voinfo = null; TreeMap<Integer, String> volist = null; //ignore resource with no service ArrayList<Integer/*service_id*/> services = oim.getResourceService(resource_id); if(services.size() == 0) continue; MetricDataModel.LMDType mset = mdm.getLastMetricDataSet(resource_id, null); MetricData m = mset.get(vosupported_metric_id); if(m == null) { errors.append("No VO Detail reported for this resource through RSV\n"); } else { voinfo = m.fetchDetail(); try { if(voinfo.substring(0, vodetail_token.length()).compareTo(vodetail_token) == 0) { String vos = voinfo.substring(vodetail_token.length()); Scanner s = new Scanner(vos); volist = new TreeMap<Integer, String>(); while (s.hasNext()) { //lookup the VOID String voname = s.next(); Integer vo_id = oim.lookupVOID(voname); if(vo_id == null) { errors.append("Unknown VO name: "+ voname + " found\n"); } else { volist.put(vo_id, voname); //store the entry to void2resources (for later output of VO grouped list) TreeSet<Integer> rs = void2resources.get(vo_id); if(rs == null) { rs = new TreeSet<Integer>(); void2resources.put(vo_id, rs); } if(!rs.contains(resource_id)) { rs.add(resource_id); } } } } } catch(StringIndexOutOfBoundsException e) { errors.append("Invalid VO Detail: " + e.getMessage() + "\n"); } } //output XML Resource r = resources.get(resource_id); xml.append("<Resource id=\""+resource_id+"\">"); xml.append("<Name>"+r.getName()+"</Name>"); xml.append("<MembersRaw><![CDATA["+voinfo+"]]></MembersRaw>"); xml.append("<ErrorMessage><![CDATA["+errors.toString()+"]]></ErrorMessage>"); xml.append("<Members>"); if(volist != null) { for(Integer vo : volist.keySet()) { xml.append("<VO id=\""+vo+"\">"+volist.get(vo)+"</VO>"); } } xml.append("</Members>"); xml.append("</Resource>\n"); } xml.append("</ResourceGrouped>"); //grouped by VO xml.append("<VOGrouped>"); for(Integer vo_id : void2resources.keySet()) { TreeSet<Integer> rs = void2resources.get(vo_id); xml.append("<VO id=\""+vo_id+"\">"); VirtualOrganization vo = oim.lookupVO(vo_id); xml.append("<Name>"+vo.getShortName()+"</Name>"); xml.append("<Members>"); for(Integer resource_id : rs) { xml.append("<Resource>"); Resource r = 
resources.get(resource_id); xml.append("<ResourceID>" + r.getID() + "</ResourceID>"); xml.append("<ResourceName>" + r.getName() + "</ResourceName>"); xml.append("</Resource>"); } xml.append("</Members>"); xml.append("</VO>"); } xml.append("</VOGrouped>"); xml.append("</VOMembership>\n"); //output XML to specified location try{ logger.debug("Wriging generated XML to : " + Configuration.vomatrix_xml_cache); FileWriter fstream = new FileWriter(RSVMain.conf.getProperty(Configuration.vomatrix_xml_cache)); BufferedWriter out = new BufferedWriter(fstream); out.write(xml.toString()); out.close(); } catch (Exception e) { logger.error("Caught exception while outputing xml cache", e); ret = RSVMain.exitcode_error; } } catch (SQLException e) { logger.error("SQL Error", e); ret = RSVMain.exitcode_error; } return ret; } }
src/rsv/process/control/RSVVOMatrix.java
package rsv.process.control; import java.io.BufferedWriter; import java.io.FileWriter; import java.sql.SQLException; import java.util.ArrayList; import java.util.Scanner; import java.util.TreeMap; import java.util.TreeSet; import org.apache.log4j.Logger; import rsv.process.Configuration; import rsv.process.model.MetricDataModel; import rsv.process.model.OIMModel; import rsv.process.model.record.MetricData; import rsv.process.model.record.Resource; import rsv.process.model.record.VirtualOrganization; public class RSVVOMatrix implements RSVProcess{ private static final Logger logger = Logger.getLogger(RSVVOMatrix.class); //some psudo-static configs private static final Integer vosupported_metric_id = 19; private static final String vodetail_token = "# List of VOs this site claims to support "; public int run(String args[]) { int ret = RSVMain.exitcode_ok; try { String xml = "<?xml version=\"1.0\"?>\n"; xml += "<VOMembership>\n"; MetricDataModel mdm = new MetricDataModel(); OIMModel oim = new OIMModel(); TreeMap<Integer/*vo_id*/, TreeSet<Integer/*resource_id*/>> void2resources = new TreeMap<Integer, TreeSet<Integer>>(); //grouped by Resource ID xml += "<ResourceGrouped>"; OIMModel.ResourcesType resources = oim.getResources(); for(Integer resource_id : resources.keySet()) { StringBuffer errors = new StringBuffer(); String voinfo = null; TreeMap<Integer, String> volist = null; //ignore resource with no service ArrayList<Integer/*service_id*/> services = oim.getResourceService(resource_id); if(services.size() == 0) continue; MetricDataModel.LMDType mset = mdm.getLastMetricDataSet(resource_id, null); MetricData m = mset.get(vosupported_metric_id); if(m == null) { errors.append("No VO Detail reported for this resource through RSV\n"); } else { voinfo = m.fetchDetail(); try { if(voinfo.substring(0, vodetail_token.length()).compareTo(vodetail_token) == 0) { String vos = voinfo.substring(vodetail_token.length()); Scanner s = new Scanner(vos); volist = new TreeMap<Integer, String>(); while (s.hasNext()) { //lookup the VOID String voname = s.next(); Integer vo_id = oim.lookupVOID(voname); if(vo_id == null) { errors.append("Unknown VO name: "+ voname + " found\n"); } else { volist.put(vo_id, voname); //store the entry to void2resources (for later output of VO grouped list) TreeSet<Integer> rs = void2resources.get(vo_id); if(rs == null) { rs = new TreeSet<Integer>(); void2resources.put(vo_id, rs); } if(!rs.contains(resource_id)) { rs.add(resource_id); } } } } } catch(StringIndexOutOfBoundsException e) { errors.append("Invalid VO Detail: " + e.getMessage() + "\n"); } } //output XML Resource r = resources.get(resource_id); xml += "<Resource id=\""+resource_id+"\">"; xml += "<Name>"+r.getName()+"</Name>"; xml += "<MembersRaw><![CDATA["+voinfo+"]]></MembersRaw>"; xml += "<ErrorMessage><![CDATA["+errors.toString()+"]]></ErrorMessage>"; xml += "<Members>"; if(volist != null) { for(Integer vo : volist.keySet()) { xml += "<VO id=\""+vo+"\">"+volist.get(vo)+"</VO>"; } } xml += "</Members>"; xml += "</Resource>\n"; } xml += "</ResourceGrouped>"; //grouped by VO xml += "<VOGrouped>"; for(Integer vo_id : void2resources.keySet()) { TreeSet<Integer> rs = void2resources.get(vo_id); xml += "<VO id=\""+vo_id+"\">"; VirtualOrganization vo = oim.lookupVO(vo_id); xml += "<Name>"+vo.getShortName()+"</Name>"; xml += "<Members>"; for(Integer resource_id : rs) { xml += "<Resource>"; Resource r = resources.get(resource_id); xml += "<ResourceID>" + r.getID() + "</ResourceID>"; xml += "<ResourceName>" + r.getName() + 
"</ResourceName>"; xml += "</Resource>"; } xml += "</Members>"; xml += "</VO>"; } xml += "</VOGrouped>"; xml += "</VOMembership>\n"; //output XML to specified location try{ logger.debug("Wriging generated XML to : " + Configuration.vomatrix_xml_cache); FileWriter fstream = new FileWriter(RSVMain.conf.getProperty(Configuration.vomatrix_xml_cache)); BufferedWriter out = new BufferedWriter(fstream); out.write(xml); out.close(); } catch (Exception e) { logger.error("Caught exception while outputing xml cache", e); ret = RSVMain.exitcode_error; } } catch (SQLException e) { logger.error("SQL Error", e); ret = RSVMain.exitcode_error; } return ret; } }
(patched) Switched from string to string buffer
src/rsv/process/control/RSVVOMatrix.java
(patched) Switched from string to string buffer
<ide><path>rc/rsv/process/control/RSVVOMatrix.java <ide> int ret = RSVMain.exitcode_ok; <ide> <ide> try { <del> String xml = "<?xml version=\"1.0\"?>\n"; <del> xml += "<VOMembership>\n"; <add> StringBuffer xml = new StringBuffer(); <add> xml.append("<?xml version=\"1.0\"?>\n"); <add> xml.append("<VOMembership>\n"); <ide> MetricDataModel mdm = new MetricDataModel(); <ide> OIMModel oim = new OIMModel(); <ide> TreeMap<Integer/*vo_id*/, TreeSet<Integer/*resource_id*/>> void2resources = new TreeMap<Integer, TreeSet<Integer>>(); <ide> <ide> //grouped by Resource ID <del> xml += "<ResourceGrouped>"; <add> xml.append("<ResourceGrouped>"); <ide> OIMModel.ResourcesType resources = oim.getResources(); <ide> for(Integer resource_id : resources.keySet()) { <ide> <ide> <ide> //output XML <ide> Resource r = resources.get(resource_id); <del> xml += "<Resource id=\""+resource_id+"\">"; <del> xml += "<Name>"+r.getName()+"</Name>"; <del> xml += "<MembersRaw><![CDATA["+voinfo+"]]></MembersRaw>"; <del> xml += "<ErrorMessage><![CDATA["+errors.toString()+"]]></ErrorMessage>"; <del> xml += "<Members>"; <add> xml.append("<Resource id=\""+resource_id+"\">"); <add> xml.append("<Name>"+r.getName()+"</Name>"); <add> xml.append("<MembersRaw><![CDATA["+voinfo+"]]></MembersRaw>"); <add> xml.append("<ErrorMessage><![CDATA["+errors.toString()+"]]></ErrorMessage>"); <add> xml.append("<Members>"); <ide> if(volist != null) { <ide> for(Integer vo : volist.keySet()) { <del> xml += "<VO id=\""+vo+"\">"+volist.get(vo)+"</VO>"; <add> xml.append("<VO id=\""+vo+"\">"+volist.get(vo)+"</VO>"); <ide> } <ide> } <del> xml += "</Members>"; <del> xml += "</Resource>\n"; <add> xml.append("</Members>"); <add> xml.append("</Resource>\n"); <ide> } <del> xml += "</ResourceGrouped>"; <add> xml.append("</ResourceGrouped>"); <ide> <ide> //grouped by VO <del> xml += "<VOGrouped>"; <add> xml.append("<VOGrouped>"); <ide> for(Integer vo_id : void2resources.keySet()) { <ide> TreeSet<Integer> rs = void2resources.get(vo_id); <del> xml += "<VO id=\""+vo_id+"\">"; <add> xml.append("<VO id=\""+vo_id+"\">"); <ide> VirtualOrganization vo = oim.lookupVO(vo_id); <del> xml += "<Name>"+vo.getShortName()+"</Name>"; <del> xml += "<Members>"; <add> xml.append("<Name>"+vo.getShortName()+"</Name>"); <add> xml.append("<Members>"); <ide> for(Integer resource_id : rs) { <del> xml += "<Resource>"; <add> xml.append("<Resource>"); <ide> Resource r = resources.get(resource_id); <del> xml += "<ResourceID>" + r.getID() + "</ResourceID>"; <del> xml += "<ResourceName>" + r.getName() + "</ResourceName>"; <del> xml += "</Resource>"; <add> xml.append("<ResourceID>" + r.getID() + "</ResourceID>"); <add> xml.append("<ResourceName>" + r.getName() + "</ResourceName>"); <add> xml.append("</Resource>"); <ide> } <del> xml += "</Members>"; <del> xml += "</VO>"; <add> xml.append("</Members>"); <add> xml.append("</VO>"); <ide> } <del> xml += "</VOGrouped>"; <add> xml.append("</VOGrouped>"); <ide> <del> xml += "</VOMembership>\n"; <add> xml.append("</VOMembership>\n"); <ide> //output XML to specified location <ide> try{ <ide> logger.debug("Wriging generated XML to : " + Configuration.vomatrix_xml_cache); <ide> FileWriter fstream = new FileWriter(RSVMain.conf.getProperty(Configuration.vomatrix_xml_cache)); <ide> BufferedWriter out = new BufferedWriter(fstream); <del> out.write(xml); <add> out.write(xml.toString()); <ide> out.close(); <ide> } catch (Exception e) { <ide> logger.error("Caught exception while outputing xml cache", e);
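The commit above swaps repeated String concatenation for a StringBuffer when assembling the VO membership XML: each "xml += ..." copies the whole accumulated string into a new object, while append() grows a single mutable buffer. The short Java sketch below shows the same pattern in isolation; the element names and sample values are illustrative only, and a StringBuilder would work just as well when no synchronization is needed (the commit simply uses its synchronized sibling).

import java.util.Arrays;
import java.util.List;

// Accumulate XML in one mutable buffer instead of re-copying a String on
// every "+=". StringBuffer is the synchronized variant used by the commit.
public class XmlBufferSketch {
    public static void main(String[] args) {
        List<String> voNames = Arrays.asList("alice", "atlas", "cms"); // sample VO names
        StringBuffer xml = new StringBuffer();
        xml.append("<?xml version=\"1.0\"?>\n<VOs>\n");
        for (String name : voNames) {
            xml.append("<VO>").append(name).append("</VO>\n");
        }
        xml.append("</VOs>\n");
        System.out.println(xml.toString());
    }
}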
Java
apache-2.0
90a0cba862bb054a417296400808a6c15486809b
0
foam-framework/foam,jlhughes/foam,foam-framework/foam,mdittmer/foam,mdittmer/foam,mdittmer/foam,foam-framework/foam,jlhughes/foam,mdittmer/foam,osric-the-knight/foam,foam-framework/foam,jacksonic/foam,jacksonic/foam,jacksonic/foam,osric-the-knight/foam,osric-the-knight/foam,osric-the-knight/foam,foam-framework/foam,jacksonic/foam,jlhughes/foam,jlhughes/foam
/** * @license * Copyright 2013 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package foam.core; import java.io.Serializable; public abstract class AbstractFObject extends PubSubSource implements FObject, Serializable { private static final String[] EMPTY_PROPERTY_TOPIC = new String[] { "property", PubSubSource.ANY }; private boolean frozen = false; public int compare(boolean o1, boolean o2) { return o1 == o2 ? 0 : o1 ? 1 : 0; } public int compare(String o1, String o2) { return o1 == o2 ? 0 : o1 == null ? -1 : o2 == null ? 1 : o1.compareTo(o2); } public int compare(short o1, short o2) { return o1 == o2 ? 0 : o1 < o2 ? -1 : 1; } public int compare(int o1, int o2) { return o1 == o2 ? 0 : o1 < o2 ? -1 : 1; } public int compare(long o1, long o2) { return o1 == o2 ? 0 : o1 < o2 ? -1 : 1; } public int compare(float o1, float o2) { return o1 == o2 ? 0 : o1 < o2 ? -1 : 1; } public int compare(double o1, double o2) { return o1 == o2 ? 0 : o1 < o2 ? -1 : 1; } public int compare(Object o1, Object o2) { if (o1 instanceof FObject && o2 instanceof FObject) { FObject f1 = (FObject) o1; FObject f2 = (FObject) o2; if (!f1.model().equals(f2.model())) { // Hack that gives unstable order for non-identical models with the same // name. Since that shouldn't happen, this shouldn't cause a problem. int c = f1.model().getName().compareTo(f2.model().getName()); return c == 0 ? 1 : c; } else { // Compare each of the properties, in order. Property[] props = f1.model().getProperties(); for (Property p : props) { int c = p.compare(f1, f2); if (c != 0) return c; } return 0; } } else if (o1 instanceof FObject) { return -1; } else { return 1; } } public int hash(boolean b) { return b ? 1 : 0; } public int hash(String s) { return s == null ? 0 : s.hashCode(); } public int hash(short s) { return s; } public int hash(int i) { return i; } public int hash(long l) { return (int) (l ^ (l >>> 32)); } public int hash(float f) { return hash(Float.floatToIntBits(f)); } public int hash(double d) { return hash(Double.doubleToLongBits(d)); } public int hash(Object o) { return o == null ? 
0 : o.hashCode(); } public int compareTo(Object other) { return compare(this, other); } public boolean equals(Object o) { return compareTo(o) == 0; } @Override public abstract int hashCode(); public abstract StringBuilder append(StringBuilder sb); public String toString() { StringBuilder sb = new StringBuilder(); append(sb); return sb.toString(); } public StringBuilder appendToJSON(StringBuilder b) { b.append("{"); b.append("model_:\""); b.append(model().getName()); b.append("\""); for (Property p : model().getProperties()) { // TODO: do not output default values if (p.isTransient()) continue; b.append(","); b.append(p.getName()); b.append(":"); b.append(p.get(this)); // TODO: escape propertly, maybe p.toJSON() } b.append("}"); return b; } public String toJSON() { StringBuilder sb = new StringBuilder(); appendToJSON(sb); return sb.toString(); } public <T> void addPropertyChangeListener(Property<T> prop, PubSubListener<ValueChangeEvent<T>> listener) { if (isFrozen()) return; if (prop == null) subscribe(EMPTY_PROPERTY_TOPIC, listener); else subscribe(prop.getPropertyTopic(), listener); } public <T> void removePropertyChangeListener(Property<T> prop, PubSubListener<ValueChangeEvent<T>> listener) { if (prop == null) unsubscribe(EMPTY_PROPERTY_TOPIC, listener); else unsubscribe(prop.getPropertyTopic(), listener); } public <T> void firePropertyChange(Property<T> prop, T oldValue, T newValue) { publish(prop.getPropertyTopic(), new PropertyChangeEvent<T>(this, prop, oldValue, newValue)); } public void freeze() { frozen = true; unsubscribeAll(); } public boolean isFrozen() { return frozen; } }
java/foam/core/AbstractFObject.java
/** * @license * Copyright 2013 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package foam.core; import java.io.Serializable; public abstract class AbstractFObject extends PubSubSource implements FObject, Serializable { private static final String[] EMPTY_PROPERTY_TOPIC = new String[] { "property", PubSubSource.ANY }; private boolean frozen = false; public int compare(boolean o1, boolean o2) { return o1 == o2 ? 0 : o1 ? 1 : 0; } public int compare(String o1, String o2) { return o1 == o2 ? 0 : o1 == null ? -1 : o2 == null ? 1 : o1.compareTo(o2); } public int compare(short o1, short o2) { return o1 == o2 ? 0 : o1 < o2 ? -1 : 1; } public int compare(int o1, int o2) { return o1 == o2 ? 0 : o1 < o2 ? -1 : 1; } public int compare(long o1, long o2) { return o1 == o2 ? 0 : o1 < o2 ? -1 : 1; } public int compare(float o1, float o2) { return o1 == o2 ? 0 : o1 < o2 ? -1 : 1; } public int compare(double o1, double o2) { return o1 == o2 ? 0 : o1 < o2 ? -1 : 1; } public int compare(Object o1, Object o2) { if (o1 instanceof FObject && o2 instanceof FObject) { FObject f1 = (FObject) o1; FObject f2 = (FObject) o2; if (!f1.model().equals(f2.model())) { // Hack that gives unstable order for non-identical models with the same // name. Since that shouldn't happen, this shouldn't cause a problem. int c = f1.model().getName().compareTo(f2.model().getName()); return c == 0 ? 1 : c; } else { // Compare each of the properties, in order. Property[] props = f1.model().getProperties(); for (Property p : props) { int c = p.compare(f1, f2); if (c != 0) return c; } return 0; } } else if (o1 instanceof FObject) { return -1; } else { return 1; } } public int hash(boolean b) { return b ? 1 : 0; } public int hash(String s) { return s == null ? 0 : s.hashCode(); } public int hash(short s) { return s; } public int hash(int i) { return i; } public int hash(long l) { return (int) (l ^ (l >>> 32)); } public int hash(float f) { return hash(Float.floatToIntBits(f)); } public int hash(double d) { return hash(Double.doubleToLongBits(d)); } public int hash(Object o) { return o == null ? 
0 : o.hashCode(); } public int compareTo(Object other) { return compare(this, other); } public boolean equals(Object o) { return compareTo(o) == 0; } public abstract StringBuilder append(StringBuilder sb); public String toString() { StringBuilder sb = new StringBuilder(); append(sb); return sb.toString(); } public StringBuilder appendToJSON(StringBuilder b) { b.append("{"); b.append("model_:\""); b.append(model().getName()); b.append("\""); for (Property p : model().getProperties()) { // TODO: do not output default values if (p.isTransient()) continue; b.append(","); b.append(p.getName()); b.append(":"); b.append(p.get(this)); // TODO: escape propertly, maybe p.toJSON() } b.append("}"); return b; } public String toJSON() { StringBuilder sb = new StringBuilder(); appendToJSON(sb); return sb.toString(); } public <T> void addPropertyChangeListener(Property<T> prop, PubSubListener<ValueChangeEvent<T>> listener) { if (isFrozen()) return; if (prop == null) subscribe(EMPTY_PROPERTY_TOPIC, listener); else subscribe(prop.getPropertyTopic(), listener); } public <T> void removePropertyChangeListener(Property<T> prop, PubSubListener<ValueChangeEvent<T>> listener) { if (prop == null) unsubscribe(EMPTY_PROPERTY_TOPIC, listener); else unsubscribe(prop.getPropertyTopic(), listener); } public <T> void firePropertyChange(Property<T> prop, T oldValue, T newValue) { publish(prop.getPropertyTopic(), new PropertyChangeEvent<T>(this, prop, oldValue, newValue)); } public void freeze() { frozen = true; unsubscribeAll(); } public boolean isFrozen() { return frozen; } }
Java: Make hashCode abstract in AbstractFObject, guaranteeing an override
java/foam/core/AbstractFObject.java
Java: Make hashCode abstract in AbstractFObject, guaranteeing an override
<ide><path>java/foam/core/AbstractFObject.java <ide> return compareTo(o) == 0; <ide> } <ide> <add> @Override <add> public abstract int hashCode(); <add> <ide> public abstract StringBuilder append(StringBuilder sb); <ide> <ide> public String toString() {
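The one-line change above redeclares hashCode() as abstract on AbstractFObject, so every concrete subclass is forced by the compiler to provide its own implementation rather than silently inheriting Object.hashCode(), presumably to keep hashCode() in step with the equals() that is defined via compareTo(). The sketch below shows the mechanism with made-up class names; it is not FOAM code.

// An abstract class may redeclare an inherited concrete method as abstract;
// concrete subclasses then fail to compile until they override it.
abstract class AbstractValue {
    public abstract int compareTo(Object other);

    @Override
    public boolean equals(Object o) {
        return compareTo(o) == 0;
    }

    // Guarantees subclasses define hashCode() alongside their equals().
    @Override
    public abstract int hashCode();
}

class IntValue extends AbstractValue {
    private final int value;

    IntValue(int value) { this.value = value; }

    @Override
    public int compareTo(Object other) {
        return (other instanceof IntValue) ? Integer.compare(value, ((IntValue) other).value) : -1;
    }

    @Override
    public int hashCode() { return value; } // removing this makes IntValue fail to compile
}

class HashCodeDemo {
    public static void main(String[] args) {
        IntValue a = new IntValue(42);
        IntValue b = new IntValue(42);
        System.out.println(a.equals(b) && a.hashCode() == b.hashCode()); // true
    }
}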
JavaScript
mit
971dba370854fe902019f92362b3efb92bdabd0c
0
Modernizr/Modernizr,Modernizr/Modernizr
/*! * Modernizr v2.5.3pre * www.modernizr.com * * Copyright (c) Faruk Ates, Paul Irish, Alex Sexton * Available under the BSD and MIT licenses: www.modernizr.com/license/ */ /* * Modernizr tests which native CSS3 and HTML5 features are available in * the current UA and makes the results available to you in two ways: * as properties on a global Modernizr object, and as classes on the * <html> element. This information allows you to progressively enhance * your pages with a granular level of control over the experience. * * Modernizr has an optional (not included) conditional resource loader * called Modernizr.load(), based on Yepnope.js (yepnopejs.com). * To get a build that includes Modernizr.load(), as well as choosing * which tests to include, go to www.modernizr.com/download/ * * Authors Faruk Ates, Paul Irish, Alex Sexton * Contributors Ryan Seddon, Ben Alman */ window.Modernizr = (function( window, document, undefined ) { var version = '2.5.3pre', Modernizr = {}, // option for enabling the HTML classes to be added enableClasses = true, docElement = document.documentElement, /** * Create our "modernizr" element that we do most feature tests on. */ mod = 'modernizr', modElem = document.createElement(mod), mStyle = modElem.style, /** * Create the input element for various Web Forms feature tests. */ inputElem = document.createElement('input'), smile = ':)', toString = {}.toString, // List of property values to set for css tests. See ticket #21 prefixes = ' -webkit- -moz- -o- -ms- '.split(' '), // Following spec is to expose vendor-specific style properties as: // elem.style.WebkitBorderRadius // and the following would be incorrect: // elem.style.webkitBorderRadius // Webkit ghosts their properties in lowercase but Opera & Moz do not. // Microsoft uses a lowercase `ms` instead of the correct `Ms` in IE8+ // erik.eae.net/archives/2008/03/10/21.48.10/ // More here: github.com/Modernizr/Modernizr/issues/issue/21 omPrefixes = 'Webkit Moz O ms', cssomPrefixes = omPrefixes.split(' '), domPrefixes = omPrefixes.toLowerCase().split(' '), ns = {'svg': 'http://www.w3.org/2000/svg'}, tests = {}, inputs = {}, attrs = {}, classes = [], slice = classes.slice, featureName, // used in testing loop // Inject element with style element and some CSS rules injectElementWithStyles = function( rule, callback, nodes, testnames ) { var style, ret, node, div = document.createElement('div'), // After page load injecting a fake body doesn't work so check if body exists body = document.body, // IE6 and 7 won't return offsetWidth or offsetHeight unless it's in the body element, so we fake it. fakeBody = body ? body : document.createElement('body'); if ( parseInt(nodes, 10) ) { // In order not to give false positives we create a node for each test // This also allows the method to scale for unspecified uses while ( nodes-- ) { node = document.createElement('div'); node.id = testnames ? testnames[nodes] : mod + (nodes + 1); div.appendChild(node); } } // <style> elements in IE6-9 are considered 'NoScope' elements and therefore will be removed // when injected with innerHTML. To get around this you need to prepend the 'NoScope' element // with a 'scoped' element, in our case the soft-hyphen entity as it won't mess with our measurements. // msdn.microsoft.com/en-us/library/ms533897%28VS.85%29.aspx // Documents served as xml will throw if using &shy; so use xml friendly encoded version. 
See issue #277 style = ['&#173;','<style>', rule, '</style>'].join(''); div.id = mod; // IE6 will false positive on some tests due to the style element inside the test div somehow interfering offsetHeight, so insert it into body or fakebody. // Opera will act all quirky when injecting elements in documentElement when page is served as xml, needs fakebody too. #270 fakeBody.innerHTML += style; fakeBody.appendChild(div); if(!body){ //avoid crashing IE8, if background image is used fakeBody.style.background = ""; docElement.appendChild(fakeBody); } ret = callback(div, rule); // If this is done after page load we don't want to remove the body so check if body exists !body ? fakeBody.parentNode.removeChild(fakeBody) : div.parentNode.removeChild(div); return !!ret; }, // adapted from matchMedia polyfill // by Scott Jehl and Paul Irish // gist.github.com/786768 testMediaQuery = function( mq ) { var matchMedia = window.matchMedia || window.msMatchMedia; if ( matchMedia ) { return matchMedia(mq).matches; } var bool; injectElementWithStyles('@media ' + mq + ' { #' + mod + ' { position: absolute; } }', function( node ) { bool = (window.getComputedStyle ? getComputedStyle(node, null) : node.currentStyle)['position'] == 'absolute'; }); return bool; }, /** * isEventSupported determines if a given element supports the given event * function from yura.thinkweb2.com/isEventSupported/ */ isEventSupported = (function() { var TAGNAMES = { 'select': 'input', 'change': 'input', 'submit': 'form', 'reset': 'form', 'error': 'img', 'load': 'img', 'abort': 'img' }; function isEventSupported( eventName, element ) { element = element || document.createElement(TAGNAMES[eventName] || 'div'); eventName = 'on' + eventName; // When using `setAttribute`, IE skips "unload", WebKit skips "unload" and "resize", whereas `in` "catches" those var isSupported = eventName in element; if ( !isSupported ) { // If it has no `setAttribute` (i.e. 
doesn't implement Node interface), try generic element if ( !element.setAttribute ) { element = document.createElement('div'); } if ( element.setAttribute && element.removeAttribute ) { element.setAttribute(eventName, ''); isSupported = is(element[eventName], 'function'); // If property was created, "remove it" (by setting value to `undefined`) if ( !is(element[eventName], 'undefined') ) { element[eventName] = undefined; } element.removeAttribute(eventName); } } element = null; return isSupported; } return isEventSupported; })(); // hasOwnProperty shim by kangax needed for Safari 2.0 support var _hasOwnProperty = ({}).hasOwnProperty, hasOwnProperty; if ( !is(_hasOwnProperty, 'undefined') && !is(_hasOwnProperty.call, 'undefined') ) { hasOwnProperty = function (object, property) { return _hasOwnProperty.call(object, property); }; } else { hasOwnProperty = function (object, property) { /* yes, this can give false positives/negatives, but most of the time we don't care about those */ return ((property in object) && is(object.constructor.prototype[property], 'undefined')); }; } // Taken from ES5-shim https://github.com/kriskowal/es5-shim/blob/master/es5-shim.js // ES-5 15.3.4.5 // http://es5.github.com/#x15.3.4.5 if (!Function.prototype.bind) { Function.prototype.bind = function bind(that) { var target = this; if (typeof target != "function") { throw new TypeError(); } var args = slice.call(arguments, 1), bound = function () { if (this instanceof bound) { var F = function(){}; F.prototype = target.prototype; var self = new F; var result = target.apply( self, args.concat(slice.call(arguments)) ); if (Object(result) === result) { return result; } return self; } else { return target.apply( that, args.concat(slice.call(arguments)) ); } }; return bound; }; } /** * setCss applies given styles to the Modernizr DOM node. */ function setCss( str ) { mStyle.cssText = str; } /** * setCssAll extrapolates all vendor-specific css strings. */ function setCssAll( str1, str2 ) { return setCss(prefixes.join(str1 + ';') + ( str2 || '' )); } /** * is returns a boolean for if typeof obj is exactly type. */ function is( obj, type ) { return typeof obj === type; } /** * contains returns a boolean for if substr is found within str. */ function contains( str, substr ) { return !!~('' + str).indexOf(substr); } /** * testProps is a generic CSS / DOM property test; if a browser supports * a certain property, it won't return undefined for it. * A supported CSS property returns empty string when its not yet set. */ function testProps( props, prefixed ) { for ( var i in props ) { if ( mStyle[ props[i] ] !== undefined ) { return prefixed == 'pfx' ? props[i] : true; } } return false; } /** * testDOMProps is a generic DOM property test; if a browser supports * a certain property, it won't return undefined for it. */ function testDOMProps( props, obj, elem ) { for ( var i in props ) { var item = obj[props[i]]; if ( item !== undefined) { // return the property name as a string if (elem === false) return props[i]; // let's bind a function if (is(item, 'function')){ // default to autobind unless override return item.bind(elem || obj); } // return the unbound function or obj or value return item; } } return false; } /** * testPropsAll tests a list of DOM properties we want to check against. * We specify literally ALL possible (known and/or likely) properties on * the element including the non-vendor prefixed one, for forward- * compatibility. 
*/ function testPropsAll( prop, prefixed, elem ) { var ucProp = prop.charAt(0).toUpperCase() + prop.substr(1), props = (prop + ' ' + cssomPrefixes.join(ucProp + ' ') + ucProp).split(' '); // did they call .prefixed('boxSizing') or are we just testing a prop? if(is(prefixed, "string") || is(prefixed, "undefined")) { return testProps(props, prefixed); // otherwise, they called .prefixed('requestAnimationFrame', window[, elem]) } else { props = (prop + ' ' + (domPrefixes).join(ucProp + ' ') + ucProp).split(' '); return testDOMProps(props, prefixed, elem); } } /** * testBundle tests a list of CSS features that require element and style injection. * By bundling them together we can reduce the need to touch the DOM multiple times. */ /*>>testBundle*/ var testBundle = (function( styles, tests ) { var style = styles.join(''), len = tests.length; injectElementWithStyles(style, function( node, rule ) { var style = document.styleSheets[document.styleSheets.length - 1], // IE8 will bork if you create a custom build that excludes both fontface and generatedcontent tests. // So we check for cssRules and that there is a rule available // More here: github.com/Modernizr/Modernizr/issues/288 & github.com/Modernizr/Modernizr/issues/293 cssText = style ? (style.cssRules && style.cssRules[0] ? style.cssRules[0].cssText : style.cssText || '') : '', children = node.childNodes, hash = {}; while ( len-- ) { hash[children[len].id] = children[len]; } /*>>touch*/ Modernizr['touch'] = ('ontouchstart' in window) || window.DocumentTouch && document instanceof DocumentTouch || (hash['touch'] && hash['touch'].offsetTop) === 9; /*>>touch*/ /*>>csstransforms3d*/ Modernizr['csstransforms3d'] = (hash['csstransforms3d'] && hash['csstransforms3d'].offsetLeft) === 9 && hash['csstransforms3d'].offsetHeight === 3; /*>>csstransforms3d*/ /*>>generatedcontent*/Modernizr['generatedcontent'] = (hash['generatedcontent'] && hash['generatedcontent'].offsetHeight) >= 1; /*>>generatedcontent*/ /*>>fontface*/ Modernizr['fontface'] = /src/i.test(cssText) && cssText.indexOf(rule.split(' ')[0]) === 0; /*>>fontface*/ }, len, tests); })([ // Pass in styles to be injected into document /*>>fontface*/ '@font-face {font-family:"font";src:url("https://")}' /*>>fontface*/ /*>>touch*/ ,['@media (',prefixes.join('touch-enabled),('),mod,')', '{#touch{top:9px;position:absolute}}'].join('') /*>>touch*/ /*>>csstransforms3d*/ ,['@media (',prefixes.join('transform-3d),('),mod,')', '{#csstransforms3d{left:9px;position:absolute;height:3px;}}'].join('')/*>>csstransforms3d*/ /*>>generatedcontent*/,['#generatedcontent:after{content:"',smile,'";visibility:hidden}'].join('') /*>>generatedcontent*/ ], [ /*>>fontface*/ 'fontface' /*>>fontface*/ /*>>touch*/ ,'touch' /*>>touch*/ /*>>csstransforms3d*/ ,'csstransforms3d' /*>>csstransforms3d*/ /*>>generatedcontent*/,'generatedcontent' /*>>generatedcontent*/ ]);/*>>testBundle*/ /** * Tests * ----- */ // The *new* flexbox // dev.w3.org/csswg/css3-flexbox tests['flexbox'] = function() { return testPropsAll('flexOrder'); }; // The *old* flexbox // www.w3.org/TR/2009/WD-css3-flexbox-20090723/ tests['flexbox-legacy'] = function() { return testPropsAll('boxDirection'); }; // On the S60 and BB Storm, getContext exists, but always returns undefined // so we actually have to call getContext() to verify // github.com/Modernizr/Modernizr/issues/issue/97/ tests['canvas'] = function() { var elem = document.createElement('canvas'); return !!(elem.getContext && elem.getContext('2d')); }; tests['canvastext'] = function() { return 
!!(Modernizr['canvas'] && is(document.createElement('canvas').getContext('2d').fillText, 'function')); }; // this test initiates a new webgl context. // webk.it/70117 is tracking a legit feature detect proposal tests['webgl'] = function() { try { var canvas = document.createElement('canvas'), ret; ret = !!(window.WebGLRenderingContext && (canvas.getContext('experimental-webgl') || canvas.getContext('webgl'))); canvas = undefined; } catch (e){ ret = false; } return ret; }; /* * The Modernizr.touch test only indicates if the browser supports * touch events, which does not necessarily reflect a touchscreen * device, as evidenced by tablets running Windows 7 or, alas, * the Palm Pre / WebOS (touch) phones. * * Additionally, Chrome (desktop) used to lie about its support on this, * but that has since been rectified: crbug.com/36415 * * We also test for Firefox 4 Multitouch Support. * * For more info, see: modernizr.github.com/Modernizr/touch.html */ tests['touch'] = function() { return Modernizr['touch']; }; /** * geolocation tests for the new Geolocation API specification. * This test is a standards compliant-only test; for more complete * testing, including a Google Gears fallback, please see: * code.google.com/p/geo-location-javascript/ * or view a fallback solution using google's geo API: * gist.github.com/366184 */ tests['geolocation'] = function() { return !!navigator.geolocation; }; // Per 1.6: // This used to be Modernizr.crosswindowmessaging but the longer // name has been deprecated in favor of a shorter and property-matching one. // The old API is still available in 1.6, but as of 2.0 will throw a warning, // and in the first release thereafter disappear entirely. tests['postmessage'] = function() { return !!window.postMessage; }; // Chrome incognito mode used to throw an exception when using openDatabase // It doesn't anymore. tests['websqldatabase'] = function() { return !!window.openDatabase; }; // Vendors had inconsistent prefixing with the experimental Indexed DB: // - Webkit's implementation is accessible through webkitIndexedDB // - Firefox shipped moz_indexedDB before FF4b9, but since then has been mozIndexedDB // For speed, we don't test the legacy (and beta-only) indexedDB tests['indexedDB'] = function() { return !!testPropsAll("indexedDB",window); }; // documentMode logic from YUI to filter out IE8 Compat Mode // which false positives. tests['hashchange'] = function() { return isEventSupported('hashchange', window) && (document.documentMode === undefined || document.documentMode > 7); }; // Per 1.6: // This used to be Modernizr.historymanagement but the longer // name has been deprecated in favor of a shorter and property-matching one. // The old API is still available in 1.6, but as of 2.0 will throw a warning, // and in the first release thereafter disappear entirely. 
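// A minimal consumption sketch for the two detects around this point (hashchange above,
// history below); illustrative only, and it assumes the detection loop further down in
// this file has already copied the test results onto the Modernizr object:
//
//     if ( Modernizr.history ) {
//       history.pushState({ step: 1 }, '', '/step-1'); // hypothetical route
//     } else if ( Modernizr.hashchange ) {
//       window.onhashchange = function () { /* fall back to #-based routing */ };
//     }
//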
tests['history'] = function() { return !!(window.history && history.pushState); }; tests['draganddrop'] = function() { var div = document.createElement('div'); return ('draggable' in div) || ('ondragstart' in div && 'ondrop' in div); }; // FIXME: Once FF10 is sunsetted, we can drop prefixed MozWebSocket // bugzil.la/695635 tests['websockets'] = function() { for ( var i = -1, len = cssomPrefixes.length; ++i < len; ){ if ( window[cssomPrefixes[i] + 'WebSocket'] ){ return true; } } return 'WebSocket' in window; }; // css-tricks.com/rgba-browser-support/ tests['rgba'] = function() { // Set an rgba() color and check the returned value setCss('background-color:rgba(150,255,150,.5)'); return contains(mStyle.backgroundColor, 'rgba'); }; tests['hsla'] = function() { // Same as rgba(), in fact, browsers re-map hsla() to rgba() internally, // except IE9 who retains it as hsla setCss('background-color:hsla(120,40%,100%,.5)'); return contains(mStyle.backgroundColor, 'rgba') || contains(mStyle.backgroundColor, 'hsla'); }; tests['multiplebgs'] = function() { // Setting multiple images AND a color on the background shorthand property // and then querying the style.background property value for the number of // occurrences of "url(" is a reliable method for detecting ACTUAL support for this! setCss('background:url(https://),url(https://),red url(https://)'); // If the UA supports multiple backgrounds, there should be three occurrences // of the string "url(" in the return value for elemStyle.background return /(url\s*\(.*?){3}/.test(mStyle.background); }; // In testing support for a given CSS property, it's legit to test: // `elem.style[styleName] !== undefined` // If the property is supported it will return an empty string, // if unsupported it will return undefined. // We'll take advantage of this quick test and skip setting a style // on our modernizr element, but instead just testing undefined vs // empty string. tests['backgroundsize'] = function() { return testPropsAll('backgroundSize'); }; tests['borderimage'] = function() { return testPropsAll('borderImage'); }; // Super comprehensive table about all the unique implementations of // border-radius: muddledramblings.com/table-of-css3-border-radius-compliance tests['borderradius'] = function() { return testPropsAll('borderRadius'); }; // WebOS unfortunately false positives on this test. tests['boxshadow'] = function() { return testPropsAll('boxShadow'); }; // FF3.0 will false positive on this test tests['textshadow'] = function() { return document.createElement('div').style.textShadow === ''; }; tests['opacity'] = function() { // Browsers that actually have CSS Opacity implemented have done so // according to spec, which means their return values are within the // range of [0.0,1.0] - including the leading zero. setCssAll('opacity:.55'); // The non-literal . 
in this regex is intentional: // German Chrome returns this value as 0,55 // github.com/Modernizr/Modernizr/issues/#issue/59/comment/516632 return /^0.55$/.test(mStyle.opacity); }; // Note, Android < 4 will pass this test, but can only animate // a single property at a time // daneden.me/2011/12/putting-up-with-androids-bullshit/ tests['cssanimations'] = function() { return testPropsAll('animationName'); }; tests['csscolumns'] = function() { return testPropsAll('columnCount'); }; tests['cssgradients'] = function() { /** * For CSS Gradients syntax, please see: * webkit.org/blog/175/introducing-css-gradients/ * developer.mozilla.org/en/CSS/-moz-linear-gradient * developer.mozilla.org/en/CSS/-moz-radial-gradient * dev.w3.org/csswg/css3-images/#gradients- */ var str1 = 'background-image:', str2 = 'gradient(linear,left top,right bottom,from(#9f9),to(white));', str3 = 'linear-gradient(left top,#9f9, white);'; setCss( // legacy webkit syntax (FIXME: remove when syntax not in use anymore) (str1 + '-webkit- '.split(' ').join(str2 + str1) // standard syntax // trailing 'background-image:' + prefixes.join(str3 + str1)).slice(0, -str1.length) ); return contains(mStyle.backgroundImage, 'gradient'); }; tests['cssreflections'] = function() { return testPropsAll('boxReflect'); }; tests['csstransforms'] = function() { return !!testPropsAll('transform'); }; tests['csstransforms3d'] = function() { var ret = !!testPropsAll('perspective'); // Webkit's 3D transforms are passed off to the browser's own graphics renderer. // It works fine in Safari on Leopard and Snow Leopard, but not in Chrome in // some conditions. As a result, Webkit typically recognizes the syntax but // will sometimes throw a false positive, thus we must do a more thorough check: if ( ret && 'webkitPerspective' in docElement.style ) { // Webkit allows this media query to succeed only if the feature is enabled. // `@media (transform-3d),(-o-transform-3d),(-moz-transform-3d),(-ms-transform-3d),(-webkit-transform-3d),(modernizr){ ... }` ret = Modernizr['csstransforms3d']; } return ret; }; tests['csstransitions'] = function() { return testPropsAll('transition'); }; /*>>fontface*/ // @font-face detection routine by Diego Perini // javascript.nwbox.com/CSSSupport/ // false positives in WebOS: github.com/Modernizr/Modernizr/issues/342 tests['fontface'] = function() { return Modernizr['fontface']; }; /*>>fontface*/ // CSS generated content detection tests['generatedcontent'] = function() { return Modernizr['generatedcontent']; }; // These tests evaluate support of the video/audio elements, as well as // testing what types of content they support. // // We're using the Boolean constructor here, so that we can extend the value // e.g. Modernizr.video // true // Modernizr.video.ogg // 'probably' // // Codec values from : github.com/NielsLeenheer/html5test/blob/9106a8/index.html#L845 // thx to NielsLeenheer and zcorpan // Note: in some older browsers, "no" was a return value instead of empty string. 
// It was live in FF3.5.0 and 3.5.1, but fixed in 3.5.2 // It was also live in Safari 4.0.0 - 4.0.4, but fixed in 4.0.5 tests['video'] = function() { var elem = document.createElement('video'), bool = false; // IE9 Running on Windows Server SKU can cause an exception to be thrown, bug #224 try { if ( bool = !!elem.canPlayType ) { bool = new Boolean(bool); bool.ogg = elem.canPlayType('video/ogg; codecs="theora"') .replace(/^no$/,''); bool.h264 = elem.canPlayType('video/mp4; codecs="avc1.42E01E"') .replace(/^no$/,''); bool.webm = elem.canPlayType('video/webm; codecs="vp8, vorbis"').replace(/^no$/,''); } } catch(e) { } return bool; }; tests['audio'] = function() { var elem = document.createElement('audio'), bool = false; try { if ( bool = !!elem.canPlayType ) { bool = new Boolean(bool); bool.ogg = elem.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,''); bool.mp3 = elem.canPlayType('audio/mpeg;') .replace(/^no$/,''); // Mimetypes accepted: // developer.mozilla.org/En/Media_formats_supported_by_the_audio_and_video_elements // bit.ly/iphoneoscodecs bool.wav = elem.canPlayType('audio/wav; codecs="1"') .replace(/^no$/,''); bool.m4a = ( elem.canPlayType('audio/x-m4a;') || elem.canPlayType('audio/aac;')) .replace(/^no$/,''); } } catch(e) { } return bool; }; // In FF4, if disabled, window.localStorage should === null. // Normally, we could not test that directly and need to do a // `('localStorage' in window) && ` test first because otherwise Firefox will // throw bugzil.la/365772 if cookies are disabled // Also in iOS5 Private Browsing mode, attepting to use localStorage.setItem // will throw the exception: // QUOTA_EXCEEDED_ERRROR DOM Exception 22. // Peculiarly, getItem and removeItem calls do not throw. // Because we are forced to try/catch this, we'll go aggressive. // Just FWIW: IE8 Compat mode supports these features completely: // www.quirksmode.org/dom/html5.html // But IE8 doesn't support either with local files tests['localstorage'] = function() { try { localStorage.setItem(mod, mod); localStorage.removeItem(mod); return true; } catch(e) { return false; } }; tests['sessionstorage'] = function() { try { sessionStorage.setItem(mod, mod); sessionStorage.removeItem(mod); return true; } catch(e) { return false; } }; tests['webworkers'] = function() { return !!window.Worker; }; tests['applicationcache'] = function() { return !!window.applicationCache; }; // Thanks to Erik Dahlstrom tests['svg'] = function() { return !!document.createElementNS && !!document.createElementNS(ns.svg, 'svg').createSVGRect; }; // specifically for SVG inline in HTML, not within XHTML // test page: paulirish.com/demo/inline-svg tests['inlinesvg'] = function() { var div = document.createElement('div'); div.innerHTML = '<svg/>'; return (div.firstChild && div.firstChild.namespaceURI) == ns.svg; }; // SVG SMIL animation tests['smil'] = function() { return !!document.createElementNS && /SVGAnimate/.test(toString.call(document.createElementNS(ns.svg, 'animate'))); }; // This test is only for clip paths in SVG proper, not clip paths on HTML content // demo: srufaculty.sru.edu/david.dailey/svg/newstuff/clipPath4.svg // However read the comments to dig into applying SVG clippaths to HTML content here: // github.com/Modernizr/Modernizr/issues/213#issuecomment-1149491 tests['svgclippaths'] = function() { return !!document.createElementNS && /SVGClipPath/.test(toString.call(document.createElementNS(ns.svg, 'clipPath'))); }; // input features and input types go directly onto the ret object, bypassing the tests loop. 
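// A short consumption sketch for the media and storage detects defined above
// (illustrative, not part of the test suite; `videoEl` and 'clip.mp4' are hypothetical
// names, and Modernizr.video is only populated once the detection loop below has run):
//
//     if ( Modernizr.video && Modernizr.video.h264 ) {
//       videoEl.src = 'clip.mp4'; // canPlayType() answered 'maybe' or 'probably'
//     }
//     var storage = Modernizr.localstorage ? window.localStorage : null;
//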
// Hold this guy to execute in a moment. function webforms() { // Run through HTML5's new input attributes to see if the UA understands any. // We're using f which is the <input> element created early on // Mike Taylr has created a comprehensive resource for testing these attributes // when applied to all input types: // miketaylr.com/code/input-type-attr.html // spec: www.whatwg.org/specs/web-apps/current-work/multipage/the-input-element.html#input-type-attr-summary // Only input placeholder is tested while textarea's placeholder is not. // Currently Safari 4 and Opera 11 have support only for the input placeholder // Both tests are available in feature-detects/forms-placeholder.js Modernizr['input'] = (function( props ) { for ( var i = 0, len = props.length; i < len; i++ ) { attrs[ props[i] ] = !!(props[i] in inputElem); } if (attrs.list){ // safari false positive's on datalist: webk.it/74252 // see also github.com/Modernizr/Modernizr/issues/146 attrs.list = !!(document.createElement('datalist') && window.HTMLDataListElement); } return attrs; })('autocomplete autofocus list placeholder max min multiple pattern required step'.split(' ')); // Run through HTML5's new input types to see if the UA understands any. // This is put behind the tests runloop because it doesn't return a // true/false like all the other tests; instead, it returns an object // containing each input type with its corresponding true/false value // Big thanks to @miketaylr for the html5 forms expertise. miketaylr.com/ Modernizr['inputtypes'] = (function(props) { for ( var i = 0, bool, inputElemType, defaultView, len = props.length; i < len; i++ ) { inputElem.setAttribute('type', inputElemType = props[i]); bool = inputElem.type !== 'text'; // We first check to see if the type we give it sticks.. // If the type does, we feed it a textual value, which shouldn't be valid. // If the value doesn't stick, we know there's input sanitization which infers a custom UI if ( bool ) { inputElem.value = smile; inputElem.style.cssText = 'position:absolute;visibility:hidden;'; if ( /^range$/.test(inputElemType) && inputElem.style.WebkitAppearance !== undefined ) { docElement.appendChild(inputElem); defaultView = document.defaultView; // Safari 2-4 allows the smiley as a value, despite making a slider bool = defaultView.getComputedStyle && defaultView.getComputedStyle(inputElem, null).WebkitAppearance !== 'textfield' && // Mobile android web browser has false positive, so must // check the height to see if the widget is actually there. (inputElem.offsetHeight !== 0); docElement.removeChild(inputElem); } else if ( /^(search|tel)$/.test(inputElemType) ){ // Spec doesnt define any special parsing or detectable UI // behaviors so we pass these through as true // Interestingly, opera fails the earlier test, so it doesn't // even make it here. } else if ( /^(url|email)$/.test(inputElemType) ) { // Real url and email support comes with prebaked validation. 
bool = inputElem.checkValidity && inputElem.checkValidity() === false; } else if ( /^color$/.test(inputElemType) ) { // chuck into DOM and force reflow for Opera bug in 11.00 // github.com/Modernizr/Modernizr/issues#issue/159 docElement.appendChild(inputElem); docElement.offsetWidth; bool = inputElem.value != smile; docElement.removeChild(inputElem); } else { // If the upgraded input compontent rejects the :) text, we got a winner bool = inputElem.value != smile; } } inputs[ props[i] ] = !!bool; } return inputs; })('search tel url email datetime date month week time datetime-local number range color'.split(' ')); } // End of test definitions // ----------------------- // Run through all tests and detect their support in the current UA. // todo: hypothetically we could be doing an array of tests and use a basic loop here. for ( var feature in tests ) { if ( hasOwnProperty(tests, feature) ) { // run the test, throw the return value into the Modernizr, // then based on that boolean, define an appropriate className // and push it into an array of classes we'll join later. featureName = feature.toLowerCase(); Modernizr[featureName] = tests[feature](); classes.push((Modernizr[featureName] ? '' : 'no-') + featureName); } } // input tests need to run. Modernizr.input || webforms(); /** * addTest allows the user to define their own feature tests * the result will be added onto the Modernizr object, * as well as an appropriate className set on the html element * * @param feature - String naming the feature * @param test - Function returning true if feature is supported, false if not */ Modernizr.addTest = function ( feature, test ) { if ( typeof feature == 'object' ) { for ( var key in feature ) { if ( hasOwnProperty( feature, key ) ) { Modernizr.addTest( key, feature[ key ] ); } } } else { feature = feature.toLowerCase(); if ( Modernizr[feature] !== undefined ) { // we're going to quit if you're trying to overwrite an existing test // if we were to allow it, we'd do this: // var re = new RegExp("\\b(no-)?" + feature + "\\b"); // docElement.className = docElement.className.replace( re, '' ); // but, no rly, stuff 'em. return Modernizr; } test = typeof test == 'function' ? test() : test; docElement.className += ' ' + (test ? '' : 'no-') + feature; Modernizr[feature] = test; } return Modernizr; // allow chaining. }; // Reset modElem.cssText to nothing to reduce memory footprint. setCss(''); modElem = inputElem = null; //>>BEGIN IEPP // Enable HTML 5 elements for styling in IE & add HTML5 css /*! 
HTML5 Shiv vpre3.4 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed */ ;(function(window, document) { /** Preset options */ var options = window.html5 || {}; /** Used to skip problem elements */ var reSkip = /^<|^(?:button|form|map|select|textarea)$/i; /** Detect whether the browser supports default html5 styles */ var supportsHtml5Styles; /** Detect whether the browser supports unknown elements */ var supportsUnknownElements; (function() { var a = document.createElement('a'); a.innerHTML = '<xyz></xyz>'; //if the hidden property is implemented we can assume, that the browser supports HTML5 Styles supportsHtml5Styles = ('hidden' in a); supportsUnknownElements = a.childNodes.length == 1 || (function() { // assign a false positive if unable to shiv try { (document.createElement)('a'); } catch(e) { return true; } var frag = document.createDocumentFragment(); return ( typeof frag.cloneNode == 'undefined' || typeof frag.createDocumentFragment == 'undefined' || typeof frag.createElement == 'undefined' ); }()); }()); /*--------------------------------------------------------------------------*/ /** * Creates a style sheet with the given CSS text and adds it to the document. * @private * @param {Document} ownerDocument The document. * @param {String} cssText The CSS text. * @returns {StyleSheet} The style element. */ function addStyleSheet(ownerDocument, cssText) { var p = ownerDocument.createElement('p'), parent = ownerDocument.getElementsByTagName('head')[0] || ownerDocument.documentElement; p.innerHTML = 'x<style>' + cssText + '</style>'; return parent.insertBefore(p.lastChild, parent.firstChild); } /** * Returns the value of `html5.elements` as an array. * @private * @returns {Array} An array of shived element node names. */ function getElements() { var elements = html5.elements; return typeof elements == 'string' ? elements.split(' ') : elements; } /** * Shivs the `createElement` and `createDocumentFragment` methods of the document. * @private * @param {Document|DocumentFragment} ownerDocument The document. */ function shivMethods(ownerDocument) { var cache = {}, docCreateElement = ownerDocument.createElement, docCreateFragment = ownerDocument.createDocumentFragment, frag = docCreateFragment(); ownerDocument.createElement = function(nodeName) { // Avoid adding some elements to fragments in IE < 9 because // * Attributes like `name` or `type` cannot be set/changed once an element // is inserted into a document/fragment // * Link elements with `src` attributes that are inaccessible, as with // a 403 response, will cause the tab/window to crash // * Script elements appended to fragments will execute when their `src` // or `text` property is set var node = (cache[nodeName] || (cache[nodeName] = docCreateElement(nodeName))).cloneNode(); return html5.shivMethods && node.canHaveChildren && !reSkip.test(nodeName) ? frag.appendChild(node) : node; }; ownerDocument.createDocumentFragment = Function('h,f', 'return function(){' + 'var n=f.cloneNode(),c=n.createElement;' + 'h.shivMethods&&(' + // unroll the `createElement` calls getElements().join().replace(/\w+/g, function(nodeName) { cache[nodeName] = docCreateElement(nodeName); frag.createElement(nodeName); return 'c("' + nodeName + '")'; }) + ');return n}' )(html5, frag); } /*--------------------------------------------------------------------------*/ /** * Shivs the given document. * @memberOf html5 * @param {Document} ownerDocument The document to shiv. * @returns {Document} The shived document. 
*/ function shivDocument(ownerDocument) { var shived; if (ownerDocument.documentShived) { return ownerDocument; } if (html5.shivCSS && !supportsHtml5Styles) { shived = !!addStyleSheet(ownerDocument, // corrects block display not defined in IE6/7/8/9 'article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}' + // corrects audio display not defined in IE6/7/8/9 'audio{display:none}' + // corrects canvas and video display not defined in IE6/7/8/9 'canvas,video{display:inline-block;*display:inline;*zoom:1}' + // corrects 'hidden' attribute and audio[controls] display not present in IE7/8/9 '[hidden]{display:none}audio[controls]{display:inline-block;*display:inline;*zoom:1}' + // adds styling not present in IE6/7/8/9 'mark{background:#FF0;color:#000}' ); } if (!supportsUnknownElements) { shived = !shivMethods(ownerDocument); } if (shived) { ownerDocument.documentShived = shived; } return ownerDocument; } /*--------------------------------------------------------------------------*/ /** * The `html5` object is exposed so that more elements can be shived and * existing shiving can be detected on iframes. * @type Object * @example * * // options can be changed before the script is included * html5 = { 'elements': 'mark section', 'shivCSS': false, 'shivMethods': false }; */ var html5 = { /** * An array or space separated string of node names of the elements to shiv. * @memberOf html5 * @type Array|String */ 'elements': options.elements || 'abbr article aside audio bdi canvas data datalist details figcaption figure footer header hgroup mark meter nav output progress section summary time video', /** * A flag to indicate that the HTML5 style sheet should be inserted. * @memberOf html5 * @type Boolean */ 'shivCSS': !(options.shivCSS === false), /** * A flag to indicate that the document's `createElement` and `createDocumentFragment` * methods should be overwritten. * @memberOf html5 * @type Boolean */ 'shivMethods': !(options.shivMethods === false), /** * A string to describe the type of `html5` object ("default" or "default print"). * @memberOf html5 * @type String */ 'type': 'default', // shivs the document according to the specified `html5` object options 'shivDocument': shivDocument }; /*--------------------------------------------------------------------------*/ // expose html5 window.html5 = html5; // shiv the document shivDocument(document); }(this, document)); //>>END IEPP // Assign private properties to the return object with prefix Modernizr._version = version; // expose these for the plugin API. Look in the source for how to join() them against your input Modernizr._prefixes = prefixes; Modernizr._domPrefixes = domPrefixes; Modernizr._cssomPrefixes = cssomPrefixes; // Modernizr.mq tests a given media query, live against the current state of the window // A few important notes: // * If a browser does not support media queries at all (eg. oldIE) the mq() will always return false // * A max-width or orientation query will be evaluated against the current state, which may change later. // * You must specify values. Eg. 
If you are testing support for the min-width media query use: // Modernizr.mq('(min-width:0)') // usage: // Modernizr.mq('only screen and (max-width:768)') Modernizr.mq = testMediaQuery; // Modernizr.hasEvent() detects support for a given event, with an optional element to test on // Modernizr.hasEvent('gesturestart', elem) Modernizr.hasEvent = isEventSupported; // Modernizr.testProp() investigates whether a given style property is recognized // Note that the property names must be provided in the camelCase variant. // Modernizr.testProp('pointerEvents') Modernizr.testProp = function(prop){ return testProps([prop]); }; // Modernizr.testAllProps() investigates whether a given style property, // or any of its vendor-prefixed variants, is recognized // Note that the property names must be provided in the camelCase variant. // Modernizr.testAllProps('boxSizing') Modernizr.testAllProps = testPropsAll; // Modernizr.testStyles() allows you to add custom styles to the document and test an element afterwards // Modernizr.testStyles('#modernizr { position:absolute }', function(elem, rule){ ... }) Modernizr.testStyles = injectElementWithStyles; // Modernizr.prefixed() returns the prefixed or nonprefixed property name variant of your input // Modernizr.prefixed('boxSizing') // 'MozBoxSizing' // Properties must be passed as dom-style camelcase, rather than `box-sizing` hypentated style. // Return values will also be the camelCase variant, if you need to translate that to hypenated style use: // // str.replace(/([A-Z])/g, function(str,m1){ return '-' + m1.toLowerCase(); }).replace(/^ms-/,'-ms-'); // If you're trying to ascertain which transition end event to bind to, you might do something like... // // var transEndEventNames = { // 'WebkitTransition' : 'webkitTransitionEnd', // 'MozTransition' : 'transitionend', // 'OTransition' : 'oTransitionEnd', // 'msTransition' : 'MsTransitionEnd', // 'transition' : 'transitionend' // }, // transEndEventName = transEndEventNames[ Modernizr.prefixed('transition') ]; Modernizr.prefixed = function(prop, obj, elem){ if(!obj) { return testPropsAll(prop, 'pfx'); } else { // Testing DOM property e.g. Modernizr.prefixed('requestAnimationFrame', window) // 'mozRequestAnimationFrame' return testPropsAll(prop, obj, elem); } }; // Remove "no-js" class from <html> element, if it exists: docElement.className = docElement.className.replace(/(^|\s)no-js(\s|$)/, '$1$2') + // Add the new classes to the <html> element. (enableClasses ? ' js ' + classes.join(' ') : ''); return Modernizr; })(this, this.document);
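// A short usage sketch (illustrative only; it assumes the Modernizr build above has
// executed in a browser, and the 'enhanced-nav' class and the 'json' custom test below
// are hypothetical examples, not part of Modernizr itself):
if ( window.Modernizr ) {
    // Property access mirrors the classes written onto <html>
    // (e.g. "csstransitions" vs "no-csstransitions").
    if ( Modernizr.csstransitions && Modernizr.mq('(min-width: 480px)') ) {
        document.documentElement.className += ' enhanced-nav';
    }
    // prefixed() returns the supported camelCase property name, e.g. 'WebkitTransition', or false.
    var transitionProp = Modernizr.prefixed('transition');
    // addTest() stores the result on Modernizr and adds "json" / "no-json" to <html>.
    Modernizr.addTest('json', !!window.JSON && !!window.JSON.parse);
}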
If you are testing support for the min-width media query use: // Modernizr.mq('(min-width:0)') // usage: // Modernizr.mq('only screen and (max-width:768)') Modernizr.mq = testMediaQuery; // Modernizr.hasEvent() detects support for a given event, with an optional element to test on // Modernizr.hasEvent('gesturestart', elem) Modernizr.hasEvent = isEventSupported; // Modernizr.testProp() investigates whether a given style property is recognized // Note that the property names must be provided in the camelCase variant. // Modernizr.testProp('pointerEvents') Modernizr.testProp = function(prop){ return testProps([prop]); }; // Modernizr.testAllProps() investigates whether a given style property, // or any of its vendor-prefixed variants, is recognized // Note that the property names must be provided in the camelCase variant. // Modernizr.testAllProps('boxSizing') Modernizr.testAllProps = testPropsAll; // Modernizr.testStyles() allows you to add custom styles to the document and test an element afterwards // Modernizr.testStyles('#modernizr { position:absolute }', function(elem, rule){ ... }) Modernizr.testStyles = injectElementWithStyles; // Modernizr.prefixed() returns the prefixed or nonprefixed property name variant of your input // Modernizr.prefixed('boxSizing') // 'MozBoxSizing' // Properties must be passed as dom-style camelcase, rather than `box-sizing` hypentated style. // Return values will also be the camelCase variant, if you need to translate that to hypenated style use: // // str.replace(/([A-Z])/g, function(str,m1){ return '-' + m1.toLowerCase(); }).replace(/^ms-/,'-ms-'); // If you're trying to ascertain which transition end event to bind to, you might do something like... // // var transEndEventNames = { // 'WebkitTransition' : 'webkitTransitionEnd', // 'MozTransition' : 'transitionend', // 'OTransition' : 'oTransitionEnd', // 'msTransition' : 'MsTransitionEnd', // 'transition' : 'transitionend' // }, // transEndEventName = transEndEventNames[ Modernizr.prefixed('transition') ]; Modernizr.prefixed = function(prop, obj, elem){ if(!obj) { return testPropsAll(prop, 'pfx'); } else { // Testing DOM property e.g. Modernizr.prefixed('requestAnimationFrame', window) // 'mozRequestAnimationFrame' return testPropsAll(prop, obj, elem); } }; // Remove "no-js" class from <html> element, if it exists: docElement.className = docElement.className.replace(/(^|\s)no-js(\s|$)/, '$1$2') + // Add the new classes to the <html> element. (enableClasses ? ' js ' + classes.join(' ') : ''); return Modernizr; })(this, this.document);
fix issue #332 and issue #501
modernizr.js
fix issue #332 and issue #501
<ide><path>modernizr.js
<ide> fakeBody.innerHTML += style;
<ide> fakeBody.appendChild(div);
<ide> if(!body){
<add> //avoid crashing IE8, if background image is used
<add> fakeBody.style.background = "";
<ide> docElement.appendChild(fakeBody);
<ide> }
<ide>
Java
apache-2.0
fa8265e325c27bd23661e63927a37b439715937b
0
virajs/selenium-1,winhamwr/selenium,winhamwr/selenium,virajs/selenium-1,virajs/selenium-1,winhamwr/selenium,virajs/selenium-1,winhamwr/selenium,winhamwr/selenium,winhamwr/selenium,winhamwr/selenium,virajs/selenium-1,virajs/selenium-1,winhamwr/selenium,virajs/selenium-1,virajs/selenium-1,virajs/selenium-1
/* Copyright 2007-2009 WebDriver committers Copyright 2007-2009 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.openqa.selenium; import org.junit.Test; import org.openqa.selenium.testing.Ignore; import org.openqa.selenium.testing.JUnit4TestBase; import org.openqa.selenium.testing.JavascriptEnabled; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.openqa.selenium.testing.Ignore.Driver.ANDROID; import static org.openqa.selenium.testing.Ignore.Driver.CHROME; import static org.openqa.selenium.testing.Ignore.Driver.IPHONE; import static org.openqa.selenium.testing.Ignore.Driver.OPERA; import static org.openqa.selenium.testing.Ignore.Driver.SELENESE; import static org.openqa.selenium.testing.TestUtilities.isOldIe; import static org.openqa.selenium.TestWaiter.waitFor; import static org.openqa.selenium.WaitingConditions.pageTitleToBe; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import java.util.List; import java.util.concurrent.Callable; public class ElementFindingTest extends JUnit4TestBase { @Test public void testShouldReturnTitleOfPageIfSet() { driver.get(pages.xhtmlTestPage); assertThat(driver.getTitle(), equalTo(("XHTML Test Page"))); driver.get(pages.simpleTestPage); assertThat(driver.getTitle(), equalTo("Hello WebDriver")); } @Test public void testShouldNotBeAbleToLocateASingleElementThatDoesNotExist() { driver.get(pages.formPage); try { driver.findElement(By.id("nonExistantButton")); fail("Should not have succeeded"); } catch (NoSuchElementException e) { // this is expected } } @Test public void testShouldBeAbleToClickOnLinkIdentifiedByText() { driver.get(pages.xhtmlTestPage); driver.findElement(By.linkText("click me")).click(); waitFor(pageTitleToBe(driver, "We Arrive Here")); assertThat(driver.getTitle(), equalTo("We Arrive Here")); } @Test public void testDriverShouldBeAbleToFindElementsAfterLoadingMoreThanOnePageAtATime() { driver.get(pages.formPage); driver.get(pages.xhtmlTestPage); driver.findElement(By.linkText("click me")).click(); waitFor(pageTitleToBe(driver, "We Arrive Here")); assertThat(driver.getTitle(), equalTo("We Arrive Here")); } @Test public void testshouldBeAbleToClickOnLinkIdentifiedById() { driver.get(pages.xhtmlTestPage); driver.findElement(By.id("linkId")).click(); waitFor(pageTitleToBe(driver, "We Arrive Here")); assertThat(driver.getTitle(), equalTo("We Arrive Here")); } @Test public void testShouldThrowAnExceptionWhenThereIsNoLinkToClickAndItIsFoundWithLinkText() { driver.get(pages.xhtmlTestPage); try { driver.findElement(By.linkText("Not here either")); fail("Should not have succeeded"); } catch (NoSuchElementException e) { // this is expected } } @Test public void testShouldfindAnElementBasedOnId() { driver.get(pages.formPage); WebElement element = driver.findElement(By.id("checky")); assertThat(element.isSelected(), is(false)); } @Test public void 
testShouldNotBeAbleTofindElementsBasedOnIdIfTheElementIsNotThere() { driver.get(pages.formPage); try { driver.findElement(By.id("notThere")); fail("Should not have succeeded"); } catch (NoSuchElementException e) { // this is expected } } @Test public void testShouldBeAbleToFindChildrenOfANode() { driver.get(pages.selectableItemsPage); List<WebElement> elements = driver.findElements(By.xpath("/html/head")); WebElement head = elements.get(0); List<WebElement> importedScripts = head.findElements(By.tagName("script")); assertThat(importedScripts.size(), equalTo(3)); } @Test public void testReturnAnEmptyListWhenThereAreNoChildrenOfANode() { driver.get(pages.xhtmlTestPage); WebElement table = driver.findElement(By.id("table")); List<WebElement> rows = table.findElements(By.tagName("tr")); assertThat(rows.size(), equalTo(0)); } @Ignore(value = SELENESE, reason = "Value returned as 'off'") @Test public void testShouldFindElementsByName() { driver.get(pages.formPage); WebElement element = driver.findElement(By.name("checky")); assertThat(element.getAttribute("value"), is("furrfu")); } @Test public void testShouldFindElementsByClass() { driver.get(pages.xhtmlTestPage); WebElement element = driver.findElement(By.className("extraDiv")); assertTrue(element.getText().startsWith("Another div starts here.")); } @Test public void testShouldFindElementsByClassWhenItIsTheFirstNameAmongMany() { driver.get(pages.xhtmlTestPage); WebElement element = driver.findElement(By.className("nameA")); assertThat(element.getText(), equalTo("An H2 title")); } @Test public void testShouldFindElementsByClassWhenItIsTheLastNameAmongMany() { driver.get(pages.xhtmlTestPage); WebElement element = driver.findElement(By.className("nameC")); assertThat(element.getText(), equalTo("An H2 title")); } @Test public void testShouldFindElementsByClassWhenItIsInTheMiddleAmongMany() { driver.get(pages.xhtmlTestPage); WebElement element = driver.findElement(By.className("nameBnoise")); assertThat(element.getText(), equalTo("An H2 title")); } @Test public void testShouldFindElementByClassWhenItsNameIsSurroundedByWhitespace() { driver.get(pages.xhtmlTestPage); WebElement element = driver.findElement(By.className("spaceAround")); assertThat(element.getText(), equalTo("Spaced out")); } @Test public void testShouldFindElementsByClassWhenItsNameIsSurroundedByWhitespace() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.className("spaceAround")); assertThat(elements.size(), equalTo(1)); assertThat(elements.get(0).getText(), equalTo("Spaced out")); } @Test public void testShouldNotFindElementsByClassWhenTheNameQueriedIsShorterThanCandidateName() { driver.get(pages.xhtmlTestPage); try { driver.findElement(By.className("nameB")); fail("Should not have succeeded"); } catch (NoSuchElementException e) { // this is expected } } @Test public void testShouldBeAbleToFindMultipleElementsByXPath() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.xpath("//div")); assertTrue(elements.size() > 1); } @Test public void testShouldBeAbleToFindMultipleElementsByLinkText() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.linkText("click me")); assertTrue("Expected 2 links, got " + elements.size(), elements.size() == 2); } @Test public void testShouldBeAbleToFindMultipleElementsByPartialLinkText() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.partialLinkText("ick me")); assertTrue(elements.size() == 2); } @Test public 
void testShouldBeAbleToFindElementByPartialLinkText() { driver.get(pages.xhtmlTestPage); try { driver.findElement(By.partialLinkText("anon")); } catch (NoSuchElementException e) { fail("Expected element to be found"); } } @Test public void testShouldFindElementByLinkTextContainingEqualsSign() { driver.get(pages.xhtmlTestPage); try { WebElement element = driver.findElement(By.linkText("Link=equalssign")); assertEquals("linkWithEqualsSign", element.getAttribute("id")); } catch (NoSuchElementException e) { fail("Expected element to be found"); } } @Test public void testShouldFindElementByPartialLinkTextContainingEqualsSign() { driver.get(pages.xhtmlTestPage); try { WebElement element = driver.findElement(By.partialLinkText("Link=")); assertEquals("linkWithEqualsSign", element.getAttribute("id")); } catch (NoSuchElementException e) { fail("Expected element to be found"); } } @Test public void testShouldFindElementsByLinkTextContainingEqualsSign() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.linkText("Link=equalssign")); assertEquals(1, elements.size()); assertEquals("linkWithEqualsSign", elements.get(0).getAttribute("id")); } @Test public void testShouldFindElementsByPartialLinkTextContainingEqualsSign() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.partialLinkText("Link=")); assertEquals(1, elements.size()); assertEquals("linkWithEqualsSign", elements.get(0).getAttribute("id")); } @Test public void testShouldBeAbleToFindMultipleElementsByName() { driver.get(pages.nestedPage); List<WebElement> elements = driver.findElements(By.name("checky")); assertTrue(elements.size() > 1); } @Ignore(value = ANDROID, reason = "Bug in Android's XPath library.") @Test public void testShouldBeAbleToFindMultipleElementsById() { driver.get(pages.nestedPage); List<WebElement> elements = driver.findElements(By.id("2")); assertEquals(8, elements.size()); } @Test public void testShouldBeAbleToFindMultipleElementsByClassName() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.className("nameC")); assertTrue(elements.size() > 1); } // You don't want to ask why this is here @Test public void testWhenFindingByNameShouldNotReturnById() { driver.get(pages.formPage); WebElement element = driver.findElement(By.name("id-name1")); assertThat(element.getAttribute("value"), is("name")); element = driver.findElement(By.id("id-name1")); assertThat(element.getAttribute("value"), is("id")); element = driver.findElement(By.name("id-name2")); assertThat(element.getAttribute("value"), is("name")); element = driver.findElement(By.id("id-name2")); assertThat(element.getAttribute("value"), is("id")); } @Test public void testShouldFindGrandChildren() { driver.get(pages.formPage); WebElement form = driver.findElement(By.id("nested_form")); form.findElement(By.name("x")); } @Test public void testShouldNotFindElementOutSideTree() { driver.get(pages.formPage); WebElement element = driver.findElement(By.name("login")); try { element.findElement(By.name("x")); } catch (NoSuchElementException e) { // this is expected } } @Test public void testShouldReturnElementsThatDoNotSupportTheNameProperty() { driver.get(pages.nestedPage); driver.findElement(By.name("div1")); // If this works, we're all good } @Test public void testShouldFindHiddenElementsByName() { driver.get(pages.formPage); try { driver.findElement(By.name("hidden")); } catch (NoSuchElementException e) { fail("Expected to be able to find hidden element"); } } @Test public 
void testShouldfindAnElementBasedOnTagName() { driver.get(pages.formPage); WebElement element = driver.findElement(By.tagName("input")); assertNotNull(element); } @Test public void testShouldfindElementsBasedOnTagName() { driver.get(pages.formPage); List<WebElement> elements = driver.findElements(By.tagName("input")); assertNotNull(elements); } @Test public void testFindingByCompoundClassNameIsAnError() { driver.get(pages.xhtmlTestPage); try { driver.findElement(By.className("a b")); fail("Compound class names aren't allowed"); } catch (IllegalLocatorException e) { // This is expected } try { driver.findElements(By.className("a b")); fail("Compound class names aren't allowed"); } catch (IllegalLocatorException e) { // This is expected } } @JavascriptEnabled @Test public void testShouldBeAbleToClickOnLinksWithNoHrefAttribute() { driver.get(pages.javascriptPage); WebElement element = driver.findElement(By.linkText("No href")); element.click(); // if any exception is thrown, we won't get this far. Sanity check waitFor(pageTitleToBe(driver, "Changed")); assertEquals("Changed", driver.getTitle()); } @Ignore({SELENESE}) @Test public void testShouldNotBeAbleToFindAnElementOnABlankPage() { driver.get("about:blank"); try { // Search for anything. This used to cause an IllegalStateException in IE. driver.findElement(By.tagName("a")); fail("Should not have been able to find a link"); } catch (NoSuchElementException e) { // this is expected } } @Ignore({IPHONE}) @NeedsFreshDriver @Test public void testShouldNotBeAbleToLocateASingleElementOnABlankPage() { // Note we're on the default start page for the browser at this point. try { driver.findElement(By.id("nonExistantButton")); fail("Should not have succeeded"); } catch (NoSuchElementException e) { // this is expected } } @JavascriptEnabled @Test public void testRemovingAnElementDynamicallyFromTheDomShouldCauseAStaleRefException() { driver.get(pages.javascriptPage); WebElement toBeDeleted = driver.findElement(By.id("deleted")); assertTrue(toBeDeleted.isDisplayed()); driver.findElement(By.id("delete")).click(); boolean wasStale = waitFor(elementToBeStale(toBeDeleted)); assertTrue("Element should be stale at this point", wasStale); } private Callable<Boolean> elementToBeStale(final WebElement element) { return new Callable<Boolean>() { public Boolean call() throws Exception { try { element.isDisplayed(); return false; } catch (StaleElementReferenceException e) { return true; } } }; } @Test public void testFindingALinkByXpathUsingContainsKeywordShouldWork() { driver.get(pages.nestedPage); try { driver.findElement(By.xpath("//a[contains(.,'hello world')]")); } catch (Exception e) { fail("Should not have thrown an exception"); } } @JavascriptEnabled @Test public void testShouldBeAbleToFindAnElementByCssSelector() { driver.get(pages.xhtmlTestPage); driver.findElement(By.cssSelector("div.content")); } @JavascriptEnabled @Test public void testShouldBeAbleToFindAnElementsByCssSelector() { driver.get(pages.xhtmlTestPage); driver.findElements(By.cssSelector("p")); } @Test public void testFindingByTagNameShouldNotIncludeParentElementIfSameTagType() { driver.get(pages.xhtmlTestPage); WebElement parent = driver.findElement(By.id("my_span")); assertEquals(2, parent.findElements(By.tagName("div")).size()); assertEquals(2, parent.findElements(By.tagName("span")).size()); } @Test public void testFindingByCssShouldNotIncludeParentElementIfSameTagType() { driver.get(pages.xhtmlTestPage); WebElement parent = driver.findElement(By.cssSelector("div#parent")); WebElement 
child = parent.findElement(By.cssSelector("div")); assertEquals("child", child.getAttribute("id")); } // TODO(danielwh): Add extensive CSS selector tests @Ignore(value = {SELENESE, OPERA}, reason = "Just not working") @Test public void testAnElementFoundInADifferentFrameIsStale() { driver.get(appServer.whereIs("missedJsReference.html")); driver.switchTo().frame("inner"); WebElement element = driver.findElement(By.id("oneline")); driver.switchTo().defaultContent(); try { element.getText(); fail("Expected exception"); } catch (StaleElementReferenceException expected) { // Expected } } @JavascriptEnabled @Ignore({ANDROID, IPHONE, OPERA, SELENESE}) @Test public void testAnElementFoundInADifferentFrameViaJsCanBeUsed() { String url = appServer.whereIs("missedJsReference.html"); driver.get(url); try { driver.switchTo().frame("inner"); WebElement first = driver.findElement(By.id("oneline")); driver.switchTo().defaultContent(); WebElement element = (WebElement) ((JavascriptExecutor) driver).executeScript( "return frames[0].document.getElementById('oneline');"); driver.switchTo().frame("inner"); WebElement second = driver.findElement(By.id("oneline")); assertEquals(first, element); assertEquals(second, element); } finally { driver.switchTo().defaultContent(); } } @Test @Ignore({CHROME, OPERA}) public void findsByLinkTextOnXhtmlPage() { if (isOldIe(driver)) { // Old IE doesn't render XHTML pages, don't try loading XHTML pages in it return; } driver.get(appServer.whereIs("actualXhtmlPage.xhtml")); String linkText = "Foo"; WebElement element = driver.findElement(By.linkText(linkText)); assertEquals(linkText, element.getText()); } }
java/client/test/org/openqa/selenium/ElementFindingTest.java
/* Copyright 2007-2009 WebDriver committers Copyright 2007-2009 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.openqa.selenium; import org.junit.Test; import org.openqa.selenium.testing.Ignore; import org.openqa.selenium.testing.JUnit4TestBase; import org.openqa.selenium.testing.JavascriptEnabled; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.openqa.selenium.testing.Ignore.Driver.ANDROID; import static org.openqa.selenium.testing.Ignore.Driver.IPHONE; import static org.openqa.selenium.testing.Ignore.Driver.OPERA; import static org.openqa.selenium.testing.Ignore.Driver.SELENESE; import static org.openqa.selenium.testing.TestUtilities.isOldIe; import static org.openqa.selenium.TestWaiter.waitFor; import static org.openqa.selenium.WaitingConditions.pageTitleToBe; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import java.util.List; import java.util.concurrent.Callable; public class ElementFindingTest extends JUnit4TestBase { @Test public void testShouldReturnTitleOfPageIfSet() { driver.get(pages.xhtmlTestPage); assertThat(driver.getTitle(), equalTo(("XHTML Test Page"))); driver.get(pages.simpleTestPage); assertThat(driver.getTitle(), equalTo("Hello WebDriver")); } @Test public void testShouldNotBeAbleToLocateASingleElementThatDoesNotExist() { driver.get(pages.formPage); try { driver.findElement(By.id("nonExistantButton")); fail("Should not have succeeded"); } catch (NoSuchElementException e) { // this is expected } } @Test public void testShouldBeAbleToClickOnLinkIdentifiedByText() { driver.get(pages.xhtmlTestPage); driver.findElement(By.linkText("click me")).click(); waitFor(pageTitleToBe(driver, "We Arrive Here")); assertThat(driver.getTitle(), equalTo("We Arrive Here")); } @Test public void testDriverShouldBeAbleToFindElementsAfterLoadingMoreThanOnePageAtATime() { driver.get(pages.formPage); driver.get(pages.xhtmlTestPage); driver.findElement(By.linkText("click me")).click(); waitFor(pageTitleToBe(driver, "We Arrive Here")); assertThat(driver.getTitle(), equalTo("We Arrive Here")); } @Test public void testshouldBeAbleToClickOnLinkIdentifiedById() { driver.get(pages.xhtmlTestPage); driver.findElement(By.id("linkId")).click(); waitFor(pageTitleToBe(driver, "We Arrive Here")); assertThat(driver.getTitle(), equalTo("We Arrive Here")); } @Test public void testShouldThrowAnExceptionWhenThereIsNoLinkToClickAndItIsFoundWithLinkText() { driver.get(pages.xhtmlTestPage); try { driver.findElement(By.linkText("Not here either")); fail("Should not have succeeded"); } catch (NoSuchElementException e) { // this is expected } } @Test public void testShouldfindAnElementBasedOnId() { driver.get(pages.formPage); WebElement element = driver.findElement(By.id("checky")); assertThat(element.isSelected(), is(false)); } @Test public void testShouldNotBeAbleTofindElementsBasedOnIdIfTheElementIsNotThere() { 
driver.get(pages.formPage); try { driver.findElement(By.id("notThere")); fail("Should not have succeeded"); } catch (NoSuchElementException e) { // this is expected } } @Test public void testShouldBeAbleToFindChildrenOfANode() { driver.get(pages.selectableItemsPage); List<WebElement> elements = driver.findElements(By.xpath("/html/head")); WebElement head = elements.get(0); List<WebElement> importedScripts = head.findElements(By.tagName("script")); assertThat(importedScripts.size(), equalTo(3)); } @Test public void testReturnAnEmptyListWhenThereAreNoChildrenOfANode() { driver.get(pages.xhtmlTestPage); WebElement table = driver.findElement(By.id("table")); List<WebElement> rows = table.findElements(By.tagName("tr")); assertThat(rows.size(), equalTo(0)); } @Ignore(value = SELENESE, reason = "Value returned as 'off'") @Test public void testShouldFindElementsByName() { driver.get(pages.formPage); WebElement element = driver.findElement(By.name("checky")); assertThat(element.getAttribute("value"), is("furrfu")); } @Test public void testShouldFindElementsByClass() { driver.get(pages.xhtmlTestPage); WebElement element = driver.findElement(By.className("extraDiv")); assertTrue(element.getText().startsWith("Another div starts here.")); } @Test public void testShouldFindElementsByClassWhenItIsTheFirstNameAmongMany() { driver.get(pages.xhtmlTestPage); WebElement element = driver.findElement(By.className("nameA")); assertThat(element.getText(), equalTo("An H2 title")); } @Test public void testShouldFindElementsByClassWhenItIsTheLastNameAmongMany() { driver.get(pages.xhtmlTestPage); WebElement element = driver.findElement(By.className("nameC")); assertThat(element.getText(), equalTo("An H2 title")); } @Test public void testShouldFindElementsByClassWhenItIsInTheMiddleAmongMany() { driver.get(pages.xhtmlTestPage); WebElement element = driver.findElement(By.className("nameBnoise")); assertThat(element.getText(), equalTo("An H2 title")); } @Test public void testShouldFindElementByClassWhenItsNameIsSurroundedByWhitespace() { driver.get(pages.xhtmlTestPage); WebElement element = driver.findElement(By.className("spaceAround")); assertThat(element.getText(), equalTo("Spaced out")); } @Test public void testShouldFindElementsByClassWhenItsNameIsSurroundedByWhitespace() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.className("spaceAround")); assertThat(elements.size(), equalTo(1)); assertThat(elements.get(0).getText(), equalTo("Spaced out")); } @Test public void testShouldNotFindElementsByClassWhenTheNameQueriedIsShorterThanCandidateName() { driver.get(pages.xhtmlTestPage); try { driver.findElement(By.className("nameB")); fail("Should not have succeeded"); } catch (NoSuchElementException e) { // this is expected } } @Test public void testShouldBeAbleToFindMultipleElementsByXPath() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.xpath("//div")); assertTrue(elements.size() > 1); } @Test public void testShouldBeAbleToFindMultipleElementsByLinkText() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.linkText("click me")); assertTrue("Expected 2 links, got " + elements.size(), elements.size() == 2); } @Test public void testShouldBeAbleToFindMultipleElementsByPartialLinkText() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.partialLinkText("ick me")); assertTrue(elements.size() == 2); } @Test public void testShouldBeAbleToFindElementByPartialLinkText() { 
driver.get(pages.xhtmlTestPage); try { driver.findElement(By.partialLinkText("anon")); } catch (NoSuchElementException e) { fail("Expected element to be found"); } } @Test public void testShouldFindElementByLinkTextContainingEqualsSign() { driver.get(pages.xhtmlTestPage); try { WebElement element = driver.findElement(By.linkText("Link=equalssign")); assertEquals("linkWithEqualsSign", element.getAttribute("id")); } catch (NoSuchElementException e) { fail("Expected element to be found"); } } @Test public void testShouldFindElementByPartialLinkTextContainingEqualsSign() { driver.get(pages.xhtmlTestPage); try { WebElement element = driver.findElement(By.partialLinkText("Link=")); assertEquals("linkWithEqualsSign", element.getAttribute("id")); } catch (NoSuchElementException e) { fail("Expected element to be found"); } } @Test public void testShouldFindElementsByLinkTextContainingEqualsSign() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.linkText("Link=equalssign")); assertEquals(1, elements.size()); assertEquals("linkWithEqualsSign", elements.get(0).getAttribute("id")); } @Test public void testShouldFindElementsByPartialLinkTextContainingEqualsSign() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.partialLinkText("Link=")); assertEquals(1, elements.size()); assertEquals("linkWithEqualsSign", elements.get(0).getAttribute("id")); } @Test public void testShouldBeAbleToFindMultipleElementsByName() { driver.get(pages.nestedPage); List<WebElement> elements = driver.findElements(By.name("checky")); assertTrue(elements.size() > 1); } @Ignore(value = ANDROID, reason = "Bug in Android's XPath library.") @Test public void testShouldBeAbleToFindMultipleElementsById() { driver.get(pages.nestedPage); List<WebElement> elements = driver.findElements(By.id("2")); assertEquals(8, elements.size()); } @Test public void testShouldBeAbleToFindMultipleElementsByClassName() { driver.get(pages.xhtmlTestPage); List<WebElement> elements = driver.findElements(By.className("nameC")); assertTrue(elements.size() > 1); } // You don't want to ask why this is here @Test public void testWhenFindingByNameShouldNotReturnById() { driver.get(pages.formPage); WebElement element = driver.findElement(By.name("id-name1")); assertThat(element.getAttribute("value"), is("name")); element = driver.findElement(By.id("id-name1")); assertThat(element.getAttribute("value"), is("id")); element = driver.findElement(By.name("id-name2")); assertThat(element.getAttribute("value"), is("name")); element = driver.findElement(By.id("id-name2")); assertThat(element.getAttribute("value"), is("id")); } @Test public void testShouldFindGrandChildren() { driver.get(pages.formPage); WebElement form = driver.findElement(By.id("nested_form")); form.findElement(By.name("x")); } @Test public void testShouldNotFindElementOutSideTree() { driver.get(pages.formPage); WebElement element = driver.findElement(By.name("login")); try { element.findElement(By.name("x")); } catch (NoSuchElementException e) { // this is expected } } @Test public void testShouldReturnElementsThatDoNotSupportTheNameProperty() { driver.get(pages.nestedPage); driver.findElement(By.name("div1")); // If this works, we're all good } @Test public void testShouldFindHiddenElementsByName() { driver.get(pages.formPage); try { driver.findElement(By.name("hidden")); } catch (NoSuchElementException e) { fail("Expected to be able to find hidden element"); } } @Test public void testShouldfindAnElementBasedOnTagName() { 
driver.get(pages.formPage); WebElement element = driver.findElement(By.tagName("input")); assertNotNull(element); } @Test public void testShouldfindElementsBasedOnTagName() { driver.get(pages.formPage); List<WebElement> elements = driver.findElements(By.tagName("input")); assertNotNull(elements); } @Test public void testFindingByCompoundClassNameIsAnError() { driver.get(pages.xhtmlTestPage); try { driver.findElement(By.className("a b")); fail("Compound class names aren't allowed"); } catch (IllegalLocatorException e) { // This is expected } try { driver.findElements(By.className("a b")); fail("Compound class names aren't allowed"); } catch (IllegalLocatorException e) { // This is expected } } @JavascriptEnabled @Test public void testShouldBeAbleToClickOnLinksWithNoHrefAttribute() { driver.get(pages.javascriptPage); WebElement element = driver.findElement(By.linkText("No href")); element.click(); // if any exception is thrown, we won't get this far. Sanity check waitFor(pageTitleToBe(driver, "Changed")); assertEquals("Changed", driver.getTitle()); } @Ignore({SELENESE}) @Test public void testShouldNotBeAbleToFindAnElementOnABlankPage() { driver.get("about:blank"); try { // Search for anything. This used to cause an IllegalStateException in IE. driver.findElement(By.tagName("a")); fail("Should not have been able to find a link"); } catch (NoSuchElementException e) { // this is expected } } @Ignore({IPHONE}) @NeedsFreshDriver @Test public void testShouldNotBeAbleToLocateASingleElementOnABlankPage() { // Note we're on the default start page for the browser at this point. try { driver.findElement(By.id("nonExistantButton")); fail("Should not have succeeded"); } catch (NoSuchElementException e) { // this is expected } } @JavascriptEnabled @Test public void testRemovingAnElementDynamicallyFromTheDomShouldCauseAStaleRefException() { driver.get(pages.javascriptPage); WebElement toBeDeleted = driver.findElement(By.id("deleted")); assertTrue(toBeDeleted.isDisplayed()); driver.findElement(By.id("delete")).click(); boolean wasStale = waitFor(elementToBeStale(toBeDeleted)); assertTrue("Element should be stale at this point", wasStale); } private Callable<Boolean> elementToBeStale(final WebElement element) { return new Callable<Boolean>() { public Boolean call() throws Exception { try { element.isDisplayed(); return false; } catch (StaleElementReferenceException e) { return true; } } }; } @Test public void testFindingALinkByXpathUsingContainsKeywordShouldWork() { driver.get(pages.nestedPage); try { driver.findElement(By.xpath("//a[contains(.,'hello world')]")); } catch (Exception e) { fail("Should not have thrown an exception"); } } @JavascriptEnabled @Test public void testShouldBeAbleToFindAnElementByCssSelector() { driver.get(pages.xhtmlTestPage); driver.findElement(By.cssSelector("div.content")); } @JavascriptEnabled @Test public void testShouldBeAbleToFindAnElementsByCssSelector() { driver.get(pages.xhtmlTestPage); driver.findElements(By.cssSelector("p")); } @Test public void testFindingByTagNameShouldNotIncludeParentElementIfSameTagType() { driver.get(pages.xhtmlTestPage); WebElement parent = driver.findElement(By.id("my_span")); assertEquals(2, parent.findElements(By.tagName("div")).size()); assertEquals(2, parent.findElements(By.tagName("span")).size()); } @Test public void testFindingByCssShouldNotIncludeParentElementIfSameTagType() { driver.get(pages.xhtmlTestPage); WebElement parent = driver.findElement(By.cssSelector("div#parent")); WebElement child = parent.findElement(By.cssSelector("div")); 
assertEquals("child", child.getAttribute("id")); } // TODO(danielwh): Add extensive CSS selector tests @Ignore(value = {SELENESE, OPERA}, reason = "Just not working") @Test public void testAnElementFoundInADifferentFrameIsStale() { driver.get(appServer.whereIs("missedJsReference.html")); driver.switchTo().frame("inner"); WebElement element = driver.findElement(By.id("oneline")); driver.switchTo().defaultContent(); try { element.getText(); fail("Expected exception"); } catch (StaleElementReferenceException expected) { // Expected } } @JavascriptEnabled @Ignore({ANDROID, IPHONE, OPERA, SELENESE}) @Test public void testAnElementFoundInADifferentFrameViaJsCanBeUsed() { String url = appServer.whereIs("missedJsReference.html"); driver.get(url); try { driver.switchTo().frame("inner"); WebElement first = driver.findElement(By.id("oneline")); driver.switchTo().defaultContent(); WebElement element = (WebElement) ((JavascriptExecutor) driver).executeScript( "return frames[0].document.getElementById('oneline');"); driver.switchTo().frame("inner"); WebElement second = driver.findElement(By.id("oneline")); assertEquals(first, element); assertEquals(second, element); } finally { driver.switchTo().defaultContent(); } } @Test @Ignore(OPERA) public void findsByLinkTextOnXhtmlPage() { if (isOldIe(driver)) { // Old IE doesn't render XHTML pages, don't try loading XHTML pages in it return; } driver.get(appServer.whereIs("actualXhtmlPage.xhtml")); String linkText = "Foo"; WebElement element = driver.findElement(By.linkText(linkText)); assertEquals(linkText, element.getText()); } }
DanielWagnerHall: This test doesn't pass on Chrome yet git-svn-id: 4179480af2c2519a5eb5e1e9b541cbdf5cf27696@16280 07704840-8298-11de-bf8c-fd130f914ac9
java/client/test/org/openqa/selenium/ElementFindingTest.java
DanielWagnerHall: This test doesn't pass on Chrome yet
<ide><path>java/client/test/org/openqa/selenium/ElementFindingTest.java
<ide> import static org.junit.Assert.assertTrue;
<ide> import static org.junit.Assert.fail;
<ide> import static org.openqa.selenium.testing.Ignore.Driver.ANDROID;
<add>import static org.openqa.selenium.testing.Ignore.Driver.CHROME;
<ide> import static org.openqa.selenium.testing.Ignore.Driver.IPHONE;
<ide> import static org.openqa.selenium.testing.Ignore.Driver.OPERA;
<ide> import static org.openqa.selenium.testing.Ignore.Driver.SELENESE;
<ide> }
<ide>
<ide> @Test
<del> @Ignore(OPERA)
<add> @Ignore({CHROME, OPERA})
<ide> public void findsByLinkTextOnXhtmlPage() {
<ide> if (isOldIe(driver)) {
<ide> // Old IE doesn't render XHTML pages, don't try loading XHTML pages in it
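The commit above only widens the @Ignore list for findsByLinkTextOnXhtmlPage, but the surrounding test class repeats the same try / fail / catch idiom wherever a locator is expected to find nothing. Below is a minimal sketch of how that idiom could be factored into a helper; the class and method names are illustrative only and are not part of the Selenium codebase.

import org.junit.Assert;
import org.openqa.selenium.By;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.WebDriver;

public class MissingElementAssertions {

    // Fails the surrounding test unless looking up the locator throws NoSuchElementException.
    public static void assertNoSuchElement(WebDriver driver, By locator) {
        try {
            driver.findElement(locator);
            Assert.fail("Should not have succeeded: " + locator);
        } catch (NoSuchElementException expected) {
            // this is expected: the element must not be present
        }
    }
}

With a helper like this, a test such as testShouldNotBeAbleToLocateASingleElementThatDoesNotExist reduces to a single call: assertNoSuchElement(driver, By.id("nonExistantButton")).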
Java
apache-2.0
810facf5855c5928d692c971339f1634c6fc98d9
0
hankcs/HanLP,YangliAtGitHub/HanLP,hankcs/HanLP,iLiXin/HanLP,xunyuw/HanLP,furaoing/HanLP,waiteryee1/HanLP,chenjun0210/HanLP
/* * <summary></summary> * <author>He Han</author> * <email>[email protected]</email> * <create-date>2014/12/10 13:44</create-date> * * <copyright file="CRFSegment.java" company="上海林原信息科技有限公司"> * Copyright (c) 2003-2014, 上海林原信息科技有限公司. All Right Reserved, http://www.linrunsoft.com/ * This source is subject to the LinrunSpace License. Please contact 上海林原信息科技有限公司 to get more information. * </copyright> */ package com.hankcs.hanlp.seg.CRF; import com.hankcs.hanlp.HanLP; import com.hankcs.hanlp.algoritm.Viterbi; import com.hankcs.hanlp.corpus.tag.Nature; import com.hankcs.hanlp.dictionary.CoreDictionary; import com.hankcs.hanlp.dictionary.CoreDictionaryTransformMatrixDictionary; import com.hankcs.hanlp.model.CRFSegmentModel; import com.hankcs.hanlp.model.crf.Table; import com.hankcs.hanlp.seg.CharacterBasedGenerativeModelSegment; import com.hankcs.hanlp.seg.Segment; import com.hankcs.hanlp.seg.common.Term; import com.hankcs.hanlp.seg.common.Vertex; import com.hankcs.hanlp.utility.CharacterHelper; import java.io.FileInputStream; import java.io.ObjectInputStream; import java.util.*; import static com.hankcs.hanlp.utility.Predefine.logger; /** * 基于CRF的分词器 * * @author hankcs */ public class CRFSegment extends CharacterBasedGenerativeModelSegment { @Override protected List<Term> segSentence(char[] sentence) { if (sentence.length == 0) return Collections.emptyList(); char[] sentenceConverted = CharTable.convert(sentence); Table table = new Table(); table.v = atomSegmentToTable(sentenceConverted); CRFSegmentModel.crfModel.tag(table); List<Term> termList = new LinkedList<Term>(); if (HanLP.Config.DEBUG) { System.out.println("CRF标注结果"); System.out.println(table); } int offset = 0; OUTER: for (int i = 0; i < table.v.length; offset += table.v[i][1].length(), ++i) { String[] line = table.v[i]; switch (line[2].charAt(0)) { case 'B': { int begin = offset; while (table.v[i][2].charAt(0) != 'E') { offset += table.v[i][1].length(); ++i; if (i == table.v.length) { break; } } if (i == table.v.length) { termList.add(new Term(new String(sentence, begin, offset - begin), null)); break OUTER; } else termList.add(new Term(new String(sentence, begin, offset - begin + table.v[i][1].length()), null)); } break; default: { termList.add(new Term(new String(sentence, offset, table.v[i][1].length()), null)); } break; } } if (config.speechTagging) { List<Vertex> vertexList = toVertexList(termList, true); Viterbi.compute(vertexList, CoreDictionaryTransformMatrixDictionary.transformMatrixDictionary); int i = 0; for (Term term : termList) { if (term.nature != null) term.nature = vertexList.get(i + 1).getNature(); ++i; } } if (config.useCustomDictionary) { List<Vertex> vertexList = toVertexList(termList, false); combineByCustomDictionary(vertexList); termList = toTermList(vertexList, config.offset); } return termList; } private static List<Vertex> toVertexList(List<Term> termList, boolean appendStart) { ArrayList<Vertex> vertexList = new ArrayList<Vertex>(termList.size() + 1); if (appendStart) vertexList.add(Vertex.B); for (Term term : termList) { CoreDictionary.Attribute attribute = CoreDictionary.get(term.word); if (attribute == null) attribute = new CoreDictionary.Attribute(Nature.nz); else term.nature = attribute.nature[0]; Vertex vertex = new Vertex(term.word, attribute); vertexList.add(vertex); } return vertexList; } /** * 将一条路径转为最终结果 * * @param vertexList * @param offsetEnabled 是否计算offset * @return */ protected static List<Term> toTermList(List<Vertex> vertexList, boolean offsetEnabled) { assert vertexList != null; int 
length = vertexList.size(); List<Term> resultList = new ArrayList<Term>(length); Iterator<Vertex> iterator = vertexList.iterator(); if (offsetEnabled) { int offset = 0; for (int i = 0; i < length; ++i) { Vertex vertex = iterator.next(); Term term = convert(vertex); term.offset = offset; offset += term.length(); resultList.add(term); } } else { for (int i = 0; i < length; ++i) { Vertex vertex = iterator.next(); Term term = convert(vertex); resultList.add(term); } } return resultList; } /** * 将节点转为term * * @param vertex * @return */ private static Term convert(Vertex vertex) { return new Term(vertex.realWord, vertex.guessNature()); } public static List<String> atomSegment(char[] sentence) { List<String> atomList = new ArrayList<String>(sentence.length); final int maxLen = sentence.length - 1; final StringBuilder sbAtom = new StringBuilder(); out: for (int i = 0; i < sentence.length; i++) { if (sentence[i] >= '0' && sentence[i] <= '9') { sbAtom.append(sentence[i]); if (i == maxLen) { atomList.add(sbAtom.toString()); sbAtom.setLength(0); break; } char c = sentence[++i]; while (c == '.' || c == '%' || (c >= '0' && c <= '9')) { sbAtom.append(sentence[i]); if (i == maxLen) { atomList.add(sbAtom.toString()); sbAtom.setLength(0); break out; } c = sentence[++i]; } atomList.add(sbAtom.toString()); sbAtom.setLength(0); --i; } else if (CharacterHelper.isEnglishLetter(sentence[i])) { sbAtom.append(sentence[i]); if (i == maxLen) { atomList.add(sbAtom.toString()); sbAtom.setLength(0); break; } char c = sentence[++i]; while (CharacterHelper.isEnglishLetter(c)) { sbAtom.append(sentence[i]); if (i == maxLen) { atomList.add(sbAtom.toString()); sbAtom.setLength(0); break out; } c = sentence[++i]; } atomList.add(sbAtom.toString()); sbAtom.setLength(0); --i; } else { atomList.add(String.valueOf(sentence[i])); } } return atomList; } public static String[][] atomSegmentToTable(char[] sentence) { String table[][] = new String[sentence.length][3]; int size = 0; final int maxLen = sentence.length - 1; final StringBuilder sbAtom = new StringBuilder(); out: for (int i = 0; i < sentence.length; i++) { if (sentence[i] >= '0' && sentence[i] <= '9') { sbAtom.append(sentence[i]); if (i == maxLen) { table[size][0] = "M"; table[size][1] = sbAtom.toString(); ++size; sbAtom.setLength(0); break; } char c = sentence[++i]; while (c == '.' 
|| c == '%' || (c >= '0' && c <= '9')) { sbAtom.append(sentence[i]); if (i == maxLen) { table[size][0] = "M"; table[size][1] = sbAtom.toString(); ++size; sbAtom.setLength(0); break out; } c = sentence[++i]; } table[size][0] = "M"; table[size][1] = sbAtom.toString(); ++size; sbAtom.setLength(0); --i; } else if (CharacterHelper.isEnglishLetter(sentence[i]) || sentence[i] == ' ') { sbAtom.append(sentence[i]); if (i == maxLen) { table[size][0] = "W"; table[size][1] = sbAtom.toString(); ++size; sbAtom.setLength(0); break; } char c = sentence[++i]; while (CharacterHelper.isEnglishLetter(c) || c == ' ') { sbAtom.append(sentence[i]); if (i == maxLen) { table[size][0] = "W"; table[size][1] = sbAtom.toString(); ++size; sbAtom.setLength(0); break out; } c = sentence[++i]; } table[size][0] = "W"; table[size][1] = sbAtom.toString(); ++size; sbAtom.setLength(0); --i; } else { table[size][0] = table[size][1] = String.valueOf(sentence[i]); ++size; } } return resizeArray(table, size); } /** * 数组减肥,原子分词可能会导致表格比原来的短 * * @param array * @param size * @return */ private static String[][] resizeArray(String[][] array, int size) { String[][] nArray = new String[size][]; System.arraycopy(array, 0, nArray, 0, size); return nArray; } @Override public Segment enableNumberQuantifierRecognize(boolean enable) { throw new UnsupportedOperationException("暂不支持"); // enablePartOfSpeechTagging(enable); // return super.enableNumberQuantifierRecognize(enable); } /** * 字符正规化表,相较于com/hankcs/hanlp/dictionary/other/CharTable.java,做了一些调整 * @author hankcs */ static private class CharTable { /** * 正规化使用的对应表 */ public static char[] CONVERT; static { long start = System.currentTimeMillis(); try { ObjectInputStream in = new ObjectInputStream(new FileInputStream(HanLP.Config.CharTablePath)); CONVERT = (char[]) in.readObject(); in.close(); } catch (Exception e) { logger.severe("字符正规化表加载失败,原因如下:"); e.printStackTrace(); System.exit(-1); } // see https://github.com/hankcs/HanLP/issues/13 CONVERT['“'] = '“'; CONVERT['”'] = '”'; CONVERT['.'] = '.'; CONVERT['.'] = '.'; CONVERT['。'] = ','; CONVERT['!'] = ','; CONVERT[','] = ','; CONVERT['…'] = ','; for (int i = 0; i < CONVERT.length; i++) { if (CONVERT[i] == '。') CONVERT[i] = ','; } logger.info("字符正规化表加载成功:" + (System.currentTimeMillis() - start) + " ms"); } /** * 将一个字符正规化 * @param c 字符 * @return 正规化后的字符 */ public static char convert(char c) { return CONVERT[c]; } public static char[] convert(char[] charArray) { char[] result = new char[charArray.length]; for (int i = 0; i < charArray.length; i++) { result[i] = CONVERT[charArray[i]]; } return result; } public static String convert(String charArray) { assert charArray != null; char[] result = new char[charArray.length()]; for (int i = 0; i < charArray.length(); i++) { result[i] = CONVERT[charArray.charAt(i)]; } return new String(result); } /** * 正规化一些字符(原地正规化) * @param charArray 字符 */ public static void normalization(char[] charArray) { assert charArray != null; for (int i = 0; i < charArray.length; i++) { charArray[i] = CONVERT[charArray[i]]; } } } }
src/main/java/com/hankcs/hanlp/seg/CRF/CRFSegment.java
/* * <summary></summary> * <author>He Han</author> * <email>[email protected]</email> * <create-date>2014/12/10 13:44</create-date> * * <copyright file="CRFSegment.java" company="上海林原信息科技有限公司"> * Copyright (c) 2003-2014, 上海林原信息科技有限公司. All Right Reserved, http://www.linrunsoft.com/ * This source is subject to the LinrunSpace License. Please contact 上海林原信息科技有限公司 to get more information. * </copyright> */ package com.hankcs.hanlp.seg.CRF; import com.hankcs.hanlp.HanLP; import com.hankcs.hanlp.algoritm.Viterbi; import com.hankcs.hanlp.corpus.tag.Nature; import com.hankcs.hanlp.dictionary.CoreDictionary; import com.hankcs.hanlp.dictionary.CoreDictionaryTransformMatrixDictionary; import com.hankcs.hanlp.model.CRFSegmentModel; import com.hankcs.hanlp.model.crf.Table; import com.hankcs.hanlp.seg.CharacterBasedGenerativeModelSegment; import com.hankcs.hanlp.seg.Segment; import com.hankcs.hanlp.seg.common.Term; import com.hankcs.hanlp.seg.common.Vertex; import com.hankcs.hanlp.utility.CharacterHelper; import java.io.FileInputStream; import java.io.ObjectInputStream; import java.util.*; import static com.hankcs.hanlp.utility.Predefine.logger; /** * 基于CRF的分词器 * * @author hankcs */ public class CRFSegment extends CharacterBasedGenerativeModelSegment { @Override protected List<Term> segSentence(char[] sentence) { if (sentence.length == 0) return Collections.emptyList(); char[] sentenceConverted = CharTable.convert(sentence); Table table = new Table(); table.v = atomSegmentToTable(sentenceConverted); CRFSegmentModel.crfModel.tag(table); List<Term> termList = new LinkedList<Term>(); if (HanLP.Config.DEBUG) { System.out.println("CRF标注结果"); System.out.println(table); } int offset = 0; for (int i = 0; i < table.v.length; offset += table.v[i][1].length(), ++i) { String[] line = table.v[i]; switch (line[2].charAt(0)) { case 'B': { int begin = offset; while (table.v[i][2].charAt(0) != 'E') { offset += table.v[i][1].length(); ++i; if (i == table.v.length) { break; } } if (i == table.v.length) { termList.add(new Term(new String(sentence, begin, offset - begin), null)); } else termList.add(new Term(new String(sentence, begin, offset - begin + table.v[i][1].length()), null)); } break; default: { termList.add(new Term(new String(sentence, offset, table.v[i][1].length()), null)); } break; } } if (config.speechTagging) { List<Vertex> vertexList = toVertexList(termList, true); Viterbi.compute(vertexList, CoreDictionaryTransformMatrixDictionary.transformMatrixDictionary); int i = 0; for (Term term : termList) { if (term.nature != null) term.nature = vertexList.get(i + 1).getNature(); ++i; } } if (config.useCustomDictionary) { List<Vertex> vertexList = toVertexList(termList, false); combineByCustomDictionary(vertexList); termList = toTermList(vertexList, config.offset); } return termList; } private static List<Vertex> toVertexList(List<Term> termList, boolean appendStart) { ArrayList<Vertex> vertexList = new ArrayList<Vertex>(termList.size() + 1); if (appendStart) vertexList.add(Vertex.B); for (Term term : termList) { CoreDictionary.Attribute attribute = CoreDictionary.get(term.word); if (attribute == null) attribute = new CoreDictionary.Attribute(Nature.nz); else term.nature = attribute.nature[0]; Vertex vertex = new Vertex(term.word, attribute); vertexList.add(vertex); } return vertexList; } /** * 将一条路径转为最终结果 * * @param vertexList * @param offsetEnabled 是否计算offset * @return */ protected static List<Term> toTermList(List<Vertex> vertexList, boolean offsetEnabled) { assert vertexList != null; int length = 
vertexList.size(); List<Term> resultList = new ArrayList<Term>(length); Iterator<Vertex> iterator = vertexList.iterator(); if (offsetEnabled) { int offset = 0; for (int i = 0; i < length; ++i) { Vertex vertex = iterator.next(); Term term = convert(vertex); term.offset = offset; offset += term.length(); resultList.add(term); } } else { for (int i = 0; i < length; ++i) { Vertex vertex = iterator.next(); Term term = convert(vertex); resultList.add(term); } } return resultList; } /** * 将节点转为term * * @param vertex * @return */ private static Term convert(Vertex vertex) { return new Term(vertex.realWord, vertex.guessNature()); } public static List<String> atomSegment(char[] sentence) { List<String> atomList = new ArrayList<String>(sentence.length); final int maxLen = sentence.length - 1; final StringBuilder sbAtom = new StringBuilder(); out: for (int i = 0; i < sentence.length; i++) { if (sentence[i] >= '0' && sentence[i] <= '9') { sbAtom.append(sentence[i]); if (i == maxLen) { atomList.add(sbAtom.toString()); sbAtom.setLength(0); break; } char c = sentence[++i]; while (c == '.' || c == '%' || (c >= '0' && c <= '9')) { sbAtom.append(sentence[i]); if (i == maxLen) { atomList.add(sbAtom.toString()); sbAtom.setLength(0); break out; } c = sentence[++i]; } atomList.add(sbAtom.toString()); sbAtom.setLength(0); --i; } else if (CharacterHelper.isEnglishLetter(sentence[i])) { sbAtom.append(sentence[i]); if (i == maxLen) { atomList.add(sbAtom.toString()); sbAtom.setLength(0); break; } char c = sentence[++i]; while (CharacterHelper.isEnglishLetter(c)) { sbAtom.append(sentence[i]); if (i == maxLen) { atomList.add(sbAtom.toString()); sbAtom.setLength(0); break out; } c = sentence[++i]; } atomList.add(sbAtom.toString()); sbAtom.setLength(0); --i; } else { atomList.add(String.valueOf(sentence[i])); } } return atomList; } public static String[][] atomSegmentToTable(char[] sentence) { String table[][] = new String[sentence.length][3]; int size = 0; final int maxLen = sentence.length - 1; final StringBuilder sbAtom = new StringBuilder(); out: for (int i = 0; i < sentence.length; i++) { if (sentence[i] >= '0' && sentence[i] <= '9') { sbAtom.append(sentence[i]); if (i == maxLen) { table[size][0] = "M"; table[size][1] = sbAtom.toString(); ++size; sbAtom.setLength(0); break; } char c = sentence[++i]; while (c == '.' 
|| c == '%' || (c >= '0' && c <= '9')) { sbAtom.append(sentence[i]); if (i == maxLen) { table[size][0] = "M"; table[size][1] = sbAtom.toString(); ++size; sbAtom.setLength(0); break out; } c = sentence[++i]; } table[size][0] = "M"; table[size][1] = sbAtom.toString(); ++size; sbAtom.setLength(0); --i; } else if (CharacterHelper.isEnglishLetter(sentence[i]) || sentence[i] == ' ') { sbAtom.append(sentence[i]); if (i == maxLen) { table[size][0] = "W"; table[size][1] = sbAtom.toString(); ++size; sbAtom.setLength(0); break; } char c = sentence[++i]; while (CharacterHelper.isEnglishLetter(c) || c == ' ') { sbAtom.append(sentence[i]); if (i == maxLen) { table[size][0] = "W"; table[size][1] = sbAtom.toString(); ++size; sbAtom.setLength(0); break out; } c = sentence[++i]; } table[size][0] = "W"; table[size][1] = sbAtom.toString(); ++size; sbAtom.setLength(0); --i; } else { table[size][0] = table[size][1] = String.valueOf(sentence[i]); ++size; } } return resizeArray(table, size); } /** * 数组减肥,原子分词可能会导致表格比原来的短 * * @param array * @param size * @return */ private static String[][] resizeArray(String[][] array, int size) { String[][] nArray = new String[size][]; System.arraycopy(array, 0, nArray, 0, size); return nArray; } @Override public Segment enableNumberQuantifierRecognize(boolean enable) { throw new UnsupportedOperationException("暂不支持"); // enablePartOfSpeechTagging(enable); // return super.enableNumberQuantifierRecognize(enable); } /** * 字符正规化表,相较于com/hankcs/hanlp/dictionary/other/CharTable.java,做了一些调整 * @author hankcs */ static private class CharTable { /** * 正规化使用的对应表 */ public static char[] CONVERT; static { long start = System.currentTimeMillis(); try { ObjectInputStream in = new ObjectInputStream(new FileInputStream(HanLP.Config.CharTablePath)); CONVERT = (char[]) in.readObject(); in.close(); } catch (Exception e) { logger.severe("字符正规化表加载失败,原因如下:"); e.printStackTrace(); System.exit(-1); } // see https://github.com/hankcs/HanLP/issues/13 CONVERT['“'] = '“'; CONVERT['”'] = '”'; CONVERT['.'] = '.'; CONVERT['.'] = '.'; CONVERT['。'] = ','; CONVERT['!'] = ','; CONVERT[','] = ','; CONVERT['…'] = ','; for (int i = 0; i < CONVERT.length; i++) { if (CONVERT[i] == '。') CONVERT[i] = ','; } logger.info("字符正规化表加载成功:" + (System.currentTimeMillis() - start) + " ms"); } /** * 将一个字符正规化 * @param c 字符 * @return 正规化后的字符 */ public static char convert(char c) { return CONVERT[c]; } public static char[] convert(char[] charArray) { char[] result = new char[charArray.length]; for (int i = 0; i < charArray.length; i++) { result[i] = CONVERT[charArray[i]]; } return result; } public static String convert(String charArray) { assert charArray != null; char[] result = new char[charArray.length()]; for (int i = 0; i < charArray.length(); i++) { result[i] = CONVERT[charArray.charAt(i)]; } return new String(result); } /** * 正规化一些字符(原地正规化) * @param charArray 字符 */ public static void normalization(char[] charArray) { assert charArray != null; for (int i = 0; i < charArray.length; i++) { charArray[i] = CONVERT[charArray[i]]; } } } }
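For context, CRFSegment above only implements the segSentence() workhorse; driving it end to end goes through the inherited Segment API. The following usage sketch is my own illustration, not part of this commit: it assumes HanLP's fluent Segment API (seg(String), enablePartOfSpeechTagging(boolean)) and assumes the CRF segmentation model and character-normalization table referenced by CRFSegmentModel and HanLP.Config.CharTablePath are available via hanlp.properties.

import com.hankcs.hanlp.seg.CRF.CRFSegment;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;
import java.util.List;

public class CRFSegmentDemo {
    public static void main(String[] args) {
        // CRF-based segmenter; part-of-speech tagging triggers the Viterbi step
        // seen in segSentence() (the config.speechTagging branch).
        Segment segment = new CRFSegment().enablePartOfSpeechTagging(true);
        List<Term> termList = segment.seg("商品和服务");
        for (Term term : termList) {
            // Term.word and Term.nature are the public fields used above.
            System.out.println(term.word + "/" + term.nature);
        }
    }
}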
Fix potential issues in CRFSegment
src/main/java/com/hankcs/hanlp/seg/CRF/CRFSegment.java
Fix potential issues in CRFSegment
<ide><path>src/main/java/com/hankcs/hanlp/seg/CRF/CRFSegment.java <ide> System.out.println(table); <ide> } <ide> int offset = 0; <add> OUTER: <ide> for (int i = 0; i < table.v.length; offset += table.v[i][1].length(), ++i) { <ide> String[] line = table.v[i]; <ide> if (i == table.v.length) { <ide> termList.add(new Term(new String(sentence, begin, offset - begin), null)); <add> break OUTER; <ide> } <ide> else <ide> termList.add(new Term(new String(sentence, begin, offset - begin + table.v[i][1].length()), null));
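The change in this diff matters because the outer for loop's update clause dereferences table.v[i] before incrementing i; when the inner scan for an 'E' tag runs off the end of the table, falling back into the outer loop would index one past the last element. The labeled break jumps clear of the outer loop instead. A minimal standalone sketch of the pattern (my own illustration, not HanLP code):

public class LabeledBreakSketch {
    public static void main(String[] args) {
        String[] tags = {"B", "M", "M"};   // no terminating "E": the inner scan runs off the end
        int consumed = 0;
        OUTER:
        // As in segSentence(), the update clause reads tags[i] before ++i.
        for (int i = 0; i < tags.length; consumed += tags[i].length(), ++i) {
            if (tags[i].charAt(0) == 'B') {
                while (tags[i].charAt(0) != 'E') {
                    ++i;
                    if (i == tags.length) {
                        System.out.println("unterminated chunk, stop scanning");
                        // With a plain break the for-update would still run and throw
                        // ArrayIndexOutOfBoundsException; break OUTER avoids that.
                        break OUTER;
                    }
                }
            }
        }
        System.out.println("consumed " + consumed + " tag characters");
    }
}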
Java
apache-2.0
d6ea7087359dd0c69d0c4f5c5f0f6a935a5a4edf
0
Reissner/maven-latex-plugin,Reissner/maven-latex-plugin,Reissner/maven-latex-plugin,Reissner/maven-latex-plugin,Reissner/maven-latex-plugin
/* * The akquinet maven-latex-plugin project * * Copyright (c) 2011 by akquinet tech@spree GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.m2latex.core; import java.io.File; import java.io.FileFilter; import java.util.Iterator; import java.util.Collection; import java.util.TreeSet; import java.util.Map; import java.util.TreeMap; /** * The latex pre-processor is for preprocessing graphic files * in formats which cannot be included directly into a latex-file * and in finding the latex main files * which is done in {@link #processGraphicsSelectMain(File, DirNode)} * and in clearing the created files from the latex source directory * in {@link #clearCreated(File)}. */ public class LatexPreProcessor extends AbstractLatexProcessor { /** * Maps the suffix to the according handler. * If the handler is <code>null</code>, there is no handler. */ private final static Map<String, SuffixHandler> SUFFIX2HANDLER = new TreeMap<String, SuffixHandler>(); static { for (SuffixHandler handler : SuffixHandler.values()) { SUFFIX2HANDLER.put(handler.getSuffix(), handler); } } // static // used in preprocessing only private final static String SUFFIX_TEX = ".tex"; // home-brewed ending to represent tex including postscript private final static String SUFFIX_PTX = ".ptx"; // the next two for preprocessing and in LatexDev only final static String SUFFIX_PDFTEX = ".pdf_tex"; final static String SUFFIX_EPSTEX = ".eps_tex"; // suffix for xfig private final static String SUFFIX_FIG = ".fig"; // suffix for svg private final static String SUFFIX_SVG = ".svg"; // suffix for gnuplot // FIXME: to be made configurable private final static String SUFFIX_GP = ".gp"; // suffix for metapost private final static String SUFFIX_MP = ".mp"; // from xxx.mp creates xxx1.mps, xxx.log and xxx.mpx private final static String SUFFIX_MPS = ".mps"; private final static String SUFFIX_MPX = ".mpx"; // just for message private final static String SUFFIX_JPG = ".jpg"; private final static String SUFFIX_PNG = ".png"; // just for silently skipping private final static String SUFFIX_BIB = ".bib"; // for latex main file creating html and for graphics. final static String SUFFIX_EPS = ".eps"; private final static String SUFFIX_XBB = ".xbb"; private final static String SUFFIX_BB = ".bb"; LatexPreProcessor(Settings settings, CommandExecutor executor, LogWrapper log, TexFileUtils fileUtils) { super(settings, executor, log, fileUtils); } // Formats that work with LaTeX (dvi mode, using dvips): // eps // Formats that work with LaTeX (dvi mode, using dvipdfm(x)): // pdf, png, jpeg, eps (the latter not taken into account) // eps-source files handled via package epstopdf: // seemingly automatically converted eps-->pdf during latex run // also there is a program epstopdf and epspdf // There is a lot of experiments to do!! 
// MISSING: pdf and eps // NOTE: graphics is typically only included via dvipdfm(x) // Formats that work with pdfLaTeX (pdf mode): // pdf, png, jpeg, jbig2 (the latter not taken into account) // LuaTeX can also read // jpeg 2000 (not taken into account) // // Seemingly, it makes sense to distinguish from pdfViaDvi-parameter: // if set, seemingly, pdf, pgn and jpg is includable only // creating .bb or .xbb. // mp: besides mpost we also have mptopdf creating pdf: // mptopdf 05someMetapost.mp creates 05someMetapost1.mps // mptopdf 05someMetapost1.mps creates 05someMetapost1-mps.pdf /** * Handler for each suffix of a source file. * Mostly, these represent graphic formats * but also {@link #SUFFIX_TEX} is required * to detect the latex main files * and {@link #SUFFIX_TEX} and {@link #SUFFIX_BIB} * are needed for proper cleaning of the tex souce directory. */ enum SuffixHandler { /** * Handler for .fig-files representing the native xfig format. */ fig { // converts a fig-file into pdf and ptx // invoking {@link #runFig2Dev(File, LatexDev)} // TEX01, EEX01, EEX02, EEX03, WEX04, WEX05 void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException { // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 proc.runFig2Dev(file); } void clearTarget(File file, LatexPreProcessor proc) { // may log EFU05 proc.clearTargetPtxPdfEps(file); } String getSuffix() { return LatexPreProcessor.SUFFIX_FIG; } }, /** * Handler for .gp-files representing the native gnuplot format. */ gp { // converts a gnuplot-file into pdf and ptx // invoking {@link #runGnuplot2Dev(File, LatexDev)} // TEX01, EEX01, EEX02, EEX03, WEX04, WEX05 void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException { proc.runGnuplot2Dev(file); } void clearTarget(File file, LatexPreProcessor proc) { // may log EFU05 proc.clearTargetPtxPdfEps(file); } String getSuffix() { return LatexPreProcessor.SUFFIX_GP; } }, /** * Handler for .mp-files representing the metapost format. */ mp { // converts a metapost-file into mps-format // invoking {@link #runMetapost2mps(File)} // TEX01, EEX01, EEX02, EEX03, WEX04, WEX05 void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException { proc.runMetapost2mps(file); } void clearTarget(File file, LatexPreProcessor proc) { // may log WFU01, EFU05 proc.clearTargetMp(file); } String getSuffix() { return LatexPreProcessor.SUFFIX_MP; } }, /** * Handler for .svg-files representing scaleable vector graphics. */ svg { // converts an svg-file into pdf and ptx // invoking {@link #runFig2Dev(File, LatexDev)} // TEX01, EEX01, EEX02, EEX03, WEX04, WEX05 // EFU06 if moving a file fails. void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException { proc.runSvg2Dev(file); // proc.log.info("Processing svg-file '" + file + // "' deferred to LaTeX run by need. "); // FIXME: this works for pdf but not for dvi: // even in the latter case, .pdf and .pdf_tex are created } void clearTarget(File file, LatexPreProcessor proc) { // may log EFU05 proc.clearTargetPtxPdfEps(file); } String getSuffix() { return LatexPreProcessor.SUFFIX_SVG; } }, /** * Handler for .jpg-files representing a format * definde by the Joint Photographic Experts Group (jp(e)g). */ jpg { void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException { proc.log.info("Jpg-file '" + file + "' needs no processing. "); // FIXME: this works for pdf but not for dvi: // in the latter case: // ! LaTeX Error: Cannot determine size of graphic ... 
// FIXME: only for dvi // proc.runEbb(file); } // void clearTarget(File file, // LatexPreProcessor proc, // Map<File, SuffixHandler> file2handler) { // // do not add to file2handler // } void clearTarget(File file, LatexPreProcessor proc) { // throw new IllegalStateException // ("File '" + file + "' has no targets to be cleared. "); proc.clearTargetJpgPng(file); } String getSuffix() { return LatexPreProcessor.SUFFIX_JPG; } }, /** * Handler for .png-files * representing the Portable Network Graphics format. */ png { void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException { proc.log.info("Png-file '" + file + "' needs no processing. "); // FIXME: this works for pdf but not for dvi: // in the latter case: // ! LaTeX Error: Cannot determine size of graphic ... // FIXME: only for dvi // proc.runEbb(file); } // void clearTarget(File file, // LatexPreProcessor proc, // Map<File, SuffixHandler> file2handler) { // // do not add to file2handler // } void clearTarget(File file, LatexPreProcessor proc) { // throw new IllegalStateException // ("File '" + file + "' has no targets to be cleared. "); proc.clearTargetJpgPng(file); } String getSuffix() { return LatexPreProcessor.SUFFIX_PNG; } }, /** * Handler for .tex-files * representing the TeX format, to be more precise the LaTeX format. */ tex { void scheduleProcSrc(File file, Map<File, SuffixHandler> file2handler, LatexPreProcessor proc, Collection<File> latexMainFiles) { file2handler.put(file, this);// super // may log WFU03, WPP02 proc.addIfLatexMain(file, latexMainFiles); } void procSrc(File file, LatexPreProcessor proc) { // do nothing: no source } void clearTarget(File file, LatexPreProcessor proc, Map<File, SuffixHandler> file2handler) { // may log WPP02, WFU01, WFU03, EFU05 proc.clearTargetTexIfLatexMain(file); } void clearTarget(File file, LatexPreProcessor proc) { throw new IllegalStateException ("Clearing targets of '" + file + "' should have been done already. "); } String getSuffix() { return LatexPreProcessor.SUFFIX_TEX; } }, /** * Handler for .bib-files * representing the BibTeX format for bibliographies. */ bib { void procSrc(File file, LatexPreProcessor proc) { proc.log.info("Found bibliography file '" + file + "'. "); } void clearTarget(File file, LatexPreProcessor proc, Map<File, SuffixHandler> file2handler) { // do not add to file2handler } void clearTarget(File file, LatexPreProcessor proc) { throw new IllegalStateException ("File '" + file + "' has no targets to be cleared. "); } String getSuffix() { return LatexPreProcessor.SUFFIX_BIB; } }; // essentially, maps file to its handler // overwritten for tex: in addition add to latexMainFiles void scheduleProcSrc(File file, Map<File, SuffixHandler> file2handler, LatexPreProcessor proc, Collection<File> latexMainFiles) { file2handler.put(file, this); } // FIXME: to be updated // if a graphic format: process source. // For tex and for bib: do nothing. /** * Typically, .i.e. for {@link #fig}-, {@link #gp}-, {@link #mp}- * and associates <code>file</code> * Does the transformation of the file <code>file</code> * using <code>proc</code> immediately, except for * <ul> * <li> * {@link #svg}-files for which an info message is logged, * that transformation is done by need in the course of a LaTeX run. * What occurs are files .pdf and .pdf_tex * even if {@link Settings#pdfViaDvi} indicates creation of dvi files. 
* <li> * {@link #tex}-files which are only scheduled for later translation * just by adding them to <code>latexMainFiles</code> * if they are latex main files, and ignored otherwise * (see {@link LatexPreProcessor#addIfLatexMain(File, Collection)}). * <li> * {@link #bib}-files for which just an info message * that a bib file was found is logged. * </ul> * <p> * Logging: * <ul> * <li> WFU03: cannot close * <li> WPP02: tex file may be latex main file * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if applications for preprocessing graphic files failed. * <li> EFU06: if moving a file fails. * </ul> * * @param file * a file with ending given by {@link #getSuffix()}. * @param proc * a latex pre-processor. * @throws BuildFailureException * TEX01 only for {@link #fig}, {@link #gp} and {@link #mp} * because these invoke external programs. */ abstract void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException; /** * Typically, .i.e. for {@link #fig}-, {@link #gp}-, {@link #mp}- * and {@link #svg}-files just associates <code>file</code> * with this handler in <code>file2handler</code> * to schedule according targets for deletion except for * <ul> * <li> * {@link #tex}-files for which the target is cleared immediately * if it is a latex main file, otherwise ignoring * by invoking {@link #clearTargetTexIfLatexMain(File)}. * <li> * {@link #bib}-files * (maybe appropriate also for jpg-files and for png-files) * for which there are no targets * and so the association is not added to <code>file2handler</code>. * </ul> * <p> * Logging: * <ul> * <li> WPP02: tex file may be latex main file * <li> WFU01: Cannot read directory... * <li> WFU03: cannot close tex file * <li> EFU05: Failed to delete file * <ul> * * @param file * a file with ending given by {@link #getSuffix()}, * i.e. a file which can be handled by this handler. * @param proc * a latex pre-processor. * @param file2handler * maps <code>file</code> to its handler. * In general, this method adds * <code>file</code> to <code>file2handler</code> * together with its handler which is just <code>this</code>. * @see #clearTarget(File, LatexPreProcessor) */ // overwritten for tex, jpg, png and for bib // appropriate for svg although file may be removed from map later // used in clearCreated(File, DirNode) only void clearTarget(File file, LatexPreProcessor proc, Map<File, SuffixHandler> file2handler) { file2handler.put(file, this); } /** * Deletes the files potentially * created from the source file <code>file</code> * using <code>proc</code>. * <p> * Logging: * <ul> * <li> WFU01: Cannot read directory... * <li> EFU05: Failed to delete file * <ul> * * @param file * a file with ending given by {@link #getSuffix()}. * @param proc * a latex pre-processor. * @throws IllegalStateException * <ul> * <li> * if <code>file</code> has no targets to be deleted * as for jpg-files, png-files and bib-files. * <li> * if targets of <code>file</code> should have been cleared already * by {@link #clearTarget(File, LatexPreProcessor, Map)} * as for tex-files. * </ul> * @see #clearTarget(File, LatexPreProcessor, Map) */ // used in clearCreated(File, DirNode) only abstract void clearTarget(File file, LatexPreProcessor proc); /** * Returns the suffix of the file type * of the file type, this is the handler for. */ abstract String getSuffix(); } // enum SuffixHandler // FIXME: CAUTION with including pictures in xfig: // This is done as reference to included file. // Thus it breaks depencency chain. 
// The following shows the supported formats: // l.10 \includegraphics{02gp2pdf000} // % // I could not locate the file with any of these extensions: // .pdf,.PDF,.ai,.AI,.png,.PNG,.jpg,.JPG,.jpeg,.JPEG,.bmp,.BMP,.ps,.PS,.eps,.EPS,. // pz,.eps.Z,.ps.Z,.ps.gz,.eps.gz // Try typing <return> to proceed. // If that doesn't work, type X <return> to quit. // ) // :<- // Package srcltx Info: Expanded filename `03someGnuplot.ptx' to `03someGnuplot.pt // x.tex' on input line 949. // FIXME: allow variants: // - pdfhandler on .pdf,.PDF, (includable directly with pdflatex) // - png/jpghandler on .png,.PNG,.jpg,.JPG,.jpeg,.JPEG, // - maybe also for .fig // FIXME: questions: // - how to include .pdf into .dvi? // - how to include .eps into .pdf? // Question: how to transform ps into eps? // Research on the following: // .ai,.AI,.bmp,.BMP, // .ps,.PS,.eps,.EPS,. // pz,.eps.Z,.ps.Z,.ps.gz,.eps.gz // FIXME: decide whether suffix .ptx is replaced by .tex: // Advantage: because this is what it is. // Disadvantage: Requires mechanism // to determine whether tex is created or original // but this works the same as for pdf and for svg. /** * Converts the fig-file <code>figFile</code> * into a tex-file with ending ptx * including a pdf-file or an eps-file also created. * To that end, invokes {@link #runFig2DevInTex(File, LatexDev)} twice * to create a pdf-file and an eps-file * and invokes {@link #runFig2TexInclDev(File)} (once) * to create the tex-file. * <p> * Logging: * <ul> * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if running the fig2dev command failed. * </ul> * * @param figFile * the fig file to be processed. * @throws BuildFailureException * TEX01 if invocation of the fig2dev command * returned by {@link Settings#getFig2devCommand()} failed. * This is invoked twice: once for creating the pdf-file * and once for creating the pdf_t-file. * @see #processGraphicsSelectMain(File, DirNode) */ // used in fig.procSrc(File, LatexPreProcessor) only private void runFig2Dev(File figFile) throws BuildFailureException { this.log.info("Processing fig-file '" + figFile + "'. "); // all three // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 runFig2DevInTex(figFile, LatexDev.pdf); runFig2DevInTex(figFile, LatexDev.dvips); runFig2TexInclDev(figFile); } /** * From <code>figFile</code> create pdf/eps-file * containing graphics without text with special flag set. * The output format depends on <code>dev</code>. * The resulting file is included in some tex-file * created by {@link #runFig2TexInclDev(File)}. * Conversion is done by {@link Settings#getFig2devCommand()}. * <p> * Logging: FIXME: * EEX01, EEX02, EEX03, WEX04, WEX05 * * @param figFile * the fig-file to be processed * @param dev * represents the target: either a pdf-file or an eps-file. * @throws BuildFailureException * FIXME: TEX01, */ private void runFig2DevInTex(File figFile, LatexDev dev) throws BuildFailureException { // Result file: either .pdf or .eps File figInTexFile = this.fileUtils .replaceSuffix(figFile, dev.getGraphicsInTexSuffix()); String command = this.settings.getFig2devCommand(); //if (update(figFile, pdfFile)) { String[] args = buildArgumentsFig2PdfEps(dev.getXFigInTexLanguage(), this.settings.getFig2devGenOptions(), this.settings.getFig2devPdfEpsOptions(), figFile, figInTexFile); this.log.debug("Running " + command + " -L pdftex/pstex ... on '" + figFile.getName() + "'. 
"); // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 this.executor.execute(figFile.getParentFile(), this.settings.getTexPath(), //**** command, args, figInTexFile); //} } // // PSTEX Options: // -b width specify width of blank border around figure (1/72 inch) // Found: affects clipping path and bounding box only. // Not usable if latex text is used because parts no longer fit. // -F use correct font sizes (points instead of 1/80inch) // Found: no effect // -g color background color // No idea in which format color is given. // -n name set title part of PostScript output to name // Found: works. Without it is just the name of xxx.fig // // the strange thing is that this is only a subset of the postscript options // to be verified whether all options apply or not. // The EPS driver has the following differences from PostScript: // o No showpage is generated // because the output is meant to be imported // into another program or document and not printed // o The landscape/portrait options are ignored // o The centering option is ignored // o The multiple-page option is ignored // o The paper size option is ignored // o The x/y offset options are ignored // The EPS driver has the following two special options: // // -B 'Wx [Wy X0 Y0]' // This specifies that the bounding box of the EPS file // should have the width Wx and the height Wy. // Note that it doesn't scale the figure to this size, // it merely sets the bounding box. // If a value less than or equal to 0 is specified for Wx or Wy, // these are set to the width/height respectively of the figure. // Origin is relative to screen (0,0) (upper-left). // Wx, Wy, X0 and Y0 are interpreted // in centimeters or inches depending on the measure // given in the fig-file. // Remember to put either quotes (") or apostrophes (') // to group the arguments to -B. // -R 'Wx [Wy X0 Y0]' // Same as the -B option except that X0 and Y0 // is relative to the lower left corner of the figure. // Remember to put either quotes (") or apostrophes (') // to group the arguments to -R. // The PDF driver uses all the PostScript options. // Explanation: many of these options do not make sense. // Tried: -x, -y to shift: does not work and does not make sense // What makes sense is // -a don't output user's login name (anonymous) // Found: login name occurs nowhere with and without -a // -N convert all colors to grayscale // Found: works // No information on PDFTEX options. // Instead: // // PDF Options: // -a don't output user's login name (anonymous) // -b width specify width of blank border around figure (1/72 inch) // -F use correct font sizes (points instead of 1/80inch) // -g color background color // // seemingly not the same, so maybe separate options required. // -n is pstex but not in pdf, // -a is pdf but not pstex... strange: is postscript /** * Returns an array of options of the form * <code>-L &lt;language> &lt;optionsGen> &lt;optionsPdfEps> xxx.fig xxx.pdf/xxx.eps * </code> for invocation of {@link Settings#getFig2devCommand()} * for creation of the pdf/eps-part of a fig-figure * as done in {@link #runFig2DevInTex(File, LatexDev)}. * * @param language * is the output language * which is either <code>pdftex</code> or <code>pstex</code> * @param optionsGen * the general options, applying to both the pdf/eps part * and the tex part of the figure under consideration. * @param optionsPdfEps * the options, specific for the pdf/eps part (which is the same) * of the figure under consideration. 
* @param figFile * the fig-file to be transformed. * @param grpFile * the graphics file (pdf/eps-file) * which is the result of the transformation. */ private String[] buildArgumentsFig2PdfEps(String language, String optionsGen, String optionsPdfEps, File figFile, File grpFile) { String[] optionsGenArr = optionsGen .isEmpty() ? new String[0] : optionsGen .split(" "); String[] optionsPdfEpsArr = optionsPdfEps.isEmpty() ? new String[0] : optionsPdfEps.split(" "); int lenSum = optionsGenArr.length + optionsPdfEpsArr.length; // add the four additional options String[] args = new String[lenSum + 4]; // language args[0] = "-L"; args[1] = language; // general options System.arraycopy(optionsGenArr, 0, args, 2, optionsGenArr .length); // language specific options System.arraycopy(optionsPdfEpsArr, 0, args, 2+optionsGenArr.length, optionsPdfEpsArr.length); // input: fig-file args[2+lenSum] = figFile.getName(); // output: pdf/eps-file args[3+lenSum] = grpFile.getName(); return args; } /** * From <code>figFile</code> create tex-file * containing text with special flag set and * including a graphic file containing the rest of <code>figFile</code>. * Inclusion is without file extension and so both possible results * of {@link #runFig2DevInTex(File, LatexDev)} can be included * when compiling with latex. * Conversion is done by {@link Settings#getFig2devCommand()}. * <p> * Logging: FIXME: * warning EEX01, EEX02, EEX03, WEX04, WEX05 * * @param figFile * the fig-file to be processed * @throws BuildFailureException * FIXME: TEX01, */ private void runFig2TexInclDev(File figFile) throws BuildFailureException { // result file: .ptx File ptxFile = this.fileUtils.replaceSuffix(figFile, SUFFIX_PTX); String command = this.settings.getFig2devCommand(); //if (update(figFile, pdf_tFile)) { String[] args = buildArgumentsFig2Ptx(this.settings.getFig2devGenOptions(), this.settings.getFig2devPtxOptions(), figFile, ptxFile); this.log.debug("Running " + command + " -L (pdf/ps)tex_t... on '" + figFile.getName() + "'. "); // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 this.executor.execute(figFile.getParentFile(), this.settings.getTexPath(), //**** command, args, ptxFile); //} } /** * The name of the language * used by the {@link Settings#getFig2devCommand()} * to specify ``special'' text without graphic of an xfig-picture. * Note that the languages <code>pdftex_t</code> and <code>pstex_t</code> * are equivalent. */ private final static String XFIG_TEX_LANGUAGE = "pdftex_t"; // Since pstex_t is equivalent with pdftex_t, // also the options are the same (hopefully) // // PSTEX_T Options: // -b width specify width of blank border around figure (1/72 inch) // -E num set encoding for text translation (0 no translation, // 1 ISO-8859-1, 2 ISO-8859-2) // -F don't set font family/series/shape, so you can // set it from latex // -p name name of the PostScript file to be overlaid /** * Returns an array of options of the form * <code>-L &lt;language> &lt;optionsGen> &lt;optionsPdfEps> -p xxx xxx.fig xxx.ptx</code> for invocation of {@link Settings#getFig2devCommand()} * for creation of the tex-part of a fig-figure * as done in {@link #runFig2TexInclDev(File)}. * Note that the option <code>-p xxx</code> * specifies the name of the pdf/eps-file * included in the result file <code>ptxFile</code> * without suffix. * * @param optionsGen * the general options, applying to both the pdf/eps part * and the tex part of the figure under consideration. 
* @param optionsPtx * the options, specific for the tex part * of the figure under consideration (for the ptx-file). * @param figFile * the fig-file to be transformed. * @param ptxFile * the ptx-file which is the result of the transformation. */ private String[] buildArgumentsFig2Ptx(String optionsGen, String optionsPtx, File figFile, File ptxFile) { String[] optionsGenArr = optionsGen.isEmpty() ? new String[0] : optionsGen.split(" "); String[] optionsPtxArr = optionsPtx.isEmpty() ? new String[0] : optionsPtx.split(" "); int lenSum = optionsGenArr.length +optionsPtxArr.length; // add the six additional options String[] args = new String[lenSum + 6]; // language args[0] = "-L"; args[1] = XFIG_TEX_LANGUAGE; // general options System.arraycopy(optionsGenArr, 0, args, 2, optionsGenArr.length); // language specific options System.arraycopy(optionsPtxArr, 0, args, 2+optionsGenArr.length, optionsPtxArr.length); // -p pdf/eps-file name in ptx-file without suffix args[2+lenSum] = "-p"; // full path without suffix args[3+lenSum] = this.fileUtils.replaceSuffix(figFile, SUFFIX_VOID) .getName(); // input: fig-file args[4+lenSum] = figFile.getName(); // output: ptx-file args[5+lenSum] = ptxFile.getName(); return args; } /** * Deletes the files <code>xxx.ptx</code>, <code>xxx.pdf</code> and * <code>xxx.eps</code> * created from the graphic file <code>grpFile</code> * of the form <code>xxx.y</code>. * <p> * Logging: * EFU05: Failed to delete file * * @param grpFile * a graphic file. */ // for formats fig, gp and svg: since all of these create ptx, pdf and eps private void clearTargetPtxPdfEps(File grpFile) { this.log.info("Deleting targets of file '" + grpFile + "'. "); // may log EFU05 deleteIfExists(grpFile, SUFFIX_PTX); deleteIfExists(grpFile, LatexDev.pdf .getGraphicsInTexSuffix());// pdf deleteIfExists(grpFile, LatexDev.dvips.getGraphicsInTexSuffix());// eps } /** * Converts a gnuplot-file into a tex-file with ending ptx * including a pdf-file or an eps-file also created. * To that end, invokes {@link #runGnuplot2Dev(File, LatexDev)} twice * to create a pdf-file and an eps-file * and to create the tex-file which can include both. * <p> * Logging: * <ul> * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if running the ptx/pdf-conversion built-in in gnuplot fails. * </ul> * * @param gpFile * the gp-file (gnuplot format) to be converted to pdf and ptx. * @throws BuildFailureException * TEX01 if invocation of the ptx/pdf-conversion built-in * in gnuplot fails. * @see #processGraphicsSelectMain(File, DirNode) */ // used in gp.procSrc(File, LatexPreProcessor) only private void runGnuplot2Dev(File gpFile) throws BuildFailureException { this.log.info("Processing gnuplot-file '" + gpFile + "'. "); // both may throw BuildFailureException TEX01, // and may log EEX01, EEX02, EEX03, WEX04, WEX05 runGnuplot2Dev(gpFile, LatexDev.dvips); runGnuplot2Dev(gpFile, LatexDev.pdf); } // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 private void runGnuplot2Dev(File gpFile, LatexDev dev) throws BuildFailureException { String command = this.settings.getGnuplotCommand(); File grpFile = this.fileUtils.replaceSuffix (gpFile, dev.getGraphicsInTexSuffix()); File ptxFile = this.fileUtils.replaceSuffix(gpFile, SUFFIX_PTX); String[] args = new String[] { "-e", // run a command string "..." 
with commands sparated by ';' // "set terminal cairolatex " + dev.getGnuplotInTexLanguage() + " " + this.settings.getGnuplotOptions() + ";set output \"" + ptxFile.getName() + "\";load \"" + gpFile.getName() + "\"" }; // if (update(gpFile, ptxFile)) { this.log.debug("Running " + command + " -e... on '" + gpFile.getName() + "'. "); // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 this.executor.execute(gpFile.getParentFile(), //workingDir this.settings.getTexPath(), //**** command, args, grpFile, ptxFile); // } // no check: just warning that no output has been created. } /** * Runs mpost on mp-files to generate mps-files. * <p> * Logging: * <ul> * <li> WFU03: cannot close log file * <li> EAP01: Running <code>command</code> failed. For details... * <li> EAP02: Running <code>command</code> failed. No log file * <li> WAP04: if log file is not readable. * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if running the mpost command failed. * </ul> * * @param mpFile * the metapost file to be processed. * @throws BuildFailureException * TEX01 if invocation of the mpost command failed. * @see #processGraphicsSelectMain(File, DirNode) */ // used in mp.procSrc(File, LatexPreProcessor) only private void runMetapost2mps(File mpFile) throws BuildFailureException { this.log.info("Processing metapost-file '" + mpFile + "'. "); String command = this.settings.getMetapostCommand(); File workingDir = mpFile.getParentFile(); // for more information just type mpost --help String[] args = buildArguments(this.settings.getMetapostOptions(), mpFile); this.log.debug("Running " + command + " on '" + mpFile.getName() + "'. "); // FIXME: not check on all created files, // but this is not worse than with latex // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 this.executor.execute(workingDir, this.settings.getTexPath(), //**** command, args, this.fileUtils.replaceSuffix(mpFile, "1"+SUFFIX_MPS)); // from xxx.mp creates xxx1.mps, xxx.log and xxx.mpx // FIXME: what is xxx.mpx for? File logFile = this.fileUtils.replaceSuffix(mpFile, SUFFIX_LOG); // may log WFU03, EAP01, EAP02, WAP04 logErrs(logFile, command, this.settings.getPatternErrMPost()); // FIXME: what about warnings? } /** * Deletes the graphic files * created from the metapost-file <code>mpFile</code>. * <p> * Logging: * <ul> * <li> WFU01: Cannot read directory ... * <li> EFU05: Failed to delete file * </ul> * * @param mpFile * a metapost file. */ private void clearTargetMp(File mpFile) { this.log.info("Deleting targets of graphic-file '" + mpFile + "'. "); // may log EFU05 deleteIfExists(mpFile, SUFFIX_LOG); deleteIfExists(mpFile, SUFFIX_FLS); deleteIfExists(mpFile, SUFFIX_MPX); // delete files xxxNumber.mps String name1 = mpFile.getName(); final String root = name1.substring(0, name1.lastIndexOf(".")); FileFilter filter = new FileFilter() { public boolean accept(File file) { return !file.isDirectory() && file.getName().matches(root + "\\d+" + SUFFIX_MPS); } }; // may log WFU01, EFU05 this.fileUtils.deleteX(mpFile, filter); } /** * Converts an svg-file into a tex-file with ending ptx * including a pdf-file or an eps-file also created. * To that end, invokes {@link #runSvg2Dev(File, LatexDev, boolean)} twice * to create a pdf-file and an eps-file * and to create the tex-file which can include both. * <p> * Logging: * <ul> * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if running the ptx/pdf-conversion built-in in svg2dev fails. * <li> EFU06 if moving a file fails. 
* </ul> * * @param svgFile * the svg-file to be converted to a pdf-file and a ptx-file. * @throws BuildFailureException * TEX01 if invocation of the ptx/pdf-conversion built-in * in svg2dev fails. * @see #processGraphicsSelectMain(File, DirNode) */ // used in svg.procSrc(File, LatexPreProcessor) only private void runSvg2Dev(File svgFile) throws BuildFailureException { this.log.info("Processing svg-file '" + svgFile + "'. "); // both may throw BuildFailureException TEX01, // and may log EEX01, EEX02, EEX03, WEX04, WEX05 runSvg2Dev(svgFile, LatexDev.pdf, false); // FIXME: avoiding may be wrong runSvg2Dev(svgFile, LatexDev.dvips, true);// that way page=1 is avoided } // FIXME: still the included pdf/eps-file does not occur // with full path in ptx-file // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05, EFU06 private void runSvg2Dev(File svgFile, LatexDev dev, boolean renameTex) throws BuildFailureException { String command = this.settings.getSvg2devCommand(); // full path without suffix File grpFile = this.fileUtils.replaceSuffix(svgFile, SUFFIX_VOID); // FIXME: eliminate literal: comes from .pdf_tex and .eps_tex // dropping .pdf and .eps, respectively File texFile = this.fileUtils.replaceSuffix(svgFile, "_tex"); String[] args = buildNullArguments(this.settings.getSvg2devOptions(), svgFile); args[0] = dev.getSvgExportOption() + grpFile.getName(); this.log.debug("Running " + command + " on '" + svgFile.getName() + "'. "); // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 this.executor.execute(svgFile.getParentFile(), this.settings.getTexPath(), //**** command, args, grpFile, texFile); // rename grpFile and texFile // may log EFU06: cannot move file this.fileUtils.moveOrError(grpFile, this.fileUtils.replaceSuffix (svgFile, dev.getGraphicsInTexSuffix())); if (renameTex) { // may log EFU06: cannot move file this.fileUtils.moveOrError(texFile, this.fileUtils.replaceSuffix (svgFile, SUFFIX_PTX)); } } // Additional research: // Documentation says, that this is needed for interface eps, // but not for interface pdf. // Experiments show, that we can do without it in any case. private void runEbb(File file) throws BuildFailureException { String command = this.settings.getEbbCommand(); File workingDir = file.getParentFile(); String[] args = buildNullArguments(this.settings.getEbbOptions(), file); // Creation of .xbb files for driver dvipdfmx // FIXME: literal args[0] ="-x"; File resFile = this.fileUtils.replaceSuffix(file, SUFFIX_XBB); this.log.debug("Running " + command + " twice on '" + file.getName() + "'. "); // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 this.executor.execute(workingDir, this.settings.getTexPath(), //**** command, args, resFile); // Creation of .bb files for driver dvipdfm // FIXME: literal args[0] ="-m"; resFile = this.fileUtils.replaceSuffix(file, SUFFIX_BB); this.executor.execute(workingDir, this.settings.getTexPath(), //**** command, args, resFile); } /** * Returns an array of strings, * where the 0th entry is <code>null</code> * and a placeholder for option <code>-x</code> or <code>-m</code> * when used by {@link #runEbb(File)} * and for the export option * when used by {@link #runSvg2Dev(File, LatexDev, boolean)} * then follow the options from <code>options</code> * and finally comes the name of <code>file</code>. 
*/ protected static String[] buildNullArguments(String options, File file) { if (options.isEmpty()) { return new String[] {null, file.getName()}; } String[] optionsArr = options.split(" "); String[] args = new String[optionsArr.length+2]; System.arraycopy(optionsArr, 0, args, 1, optionsArr.length); args[args.length-1] = file.getName(); assert args[0] == null; return args; } /** * Deletes the graphic files * created from the svg-file <code>svgFile</code>. * <p> * Logging: * EFU05: Failed to delete file */ private void clearTargetJpgPng(File file) { this.log.info("Deleting targets of jpg/png-file '" + file + "'. "); // may log EFU05 deleteIfExists(file, SUFFIX_XBB); deleteIfExists(file, SUFFIX_BB); // deleteIfExists(svgFile, SUFFIX_PSTEX ); // deleteIfExists(file, SUFFIX_PDF ); // FIXME: this works for pdf but not for dvi: // even in the latter case, .pdf and .pdf_tex are created } /** * * <p> * Logging: * EFU05: Failed to delete file */ private void deleteIfExists(File file, String suffix) { File delFile = this.fileUtils.replaceSuffix(file, suffix); if (!delFile.exists()) { return; } // may log EFU05 this.fileUtils.deleteOrError(delFile); } /** * Returns whether <code>texFile</code> is a latex main file, * provided it is readable. * Otherwise logs a warning and returns <code>false</code>. * <p> * Logging: * <ul> * <li> WFU03: cannot close * <li> WPP02: tex file may be latex main file * <ul> * * @param texFile * the tex-file to decide on whether it is a latex main file. * @return * whether <code>texFile</code> is definitively a latex main file. * If this is not readable, <code>false</code>. */ // used // by addIfLatexMain(File, Collection) and // by clearTargetTexIfLatexMain(File) private boolean isLatexMainFile(File texFile) { assert texFile.exists(); // may log WFU03 cannot close Boolean res = this.fileUtils.matchInFile (texFile, this.settings.getPatternLatexMainFile()); if (res == null) { this.log.warn("WPP02: Cannot read tex file '" + texFile + "'; may bear latex main file. "); return false; } return res; } /** * If the tex-file <code>texFile</code> is a latex main file, * add it to <code>latexMainFiles</code>. * <p> * Logging: * <ul> * <li> WFU03: cannot close * <li> WPP02: tex file may be latex main file * <ul> * * @param texFile * the tex-file to be added to <code>latexMainFiles</code> * if it is a latex main file. * @param latexMainFiles * the collection of latex main files found so far. */ // invoked only by tex.procSrc(File, LatexPreProcessor) private void addIfLatexMain(File texFile, Collection<File> latexMainFiles) { // may log WFU03, WPP02 if (isLatexMainFile(texFile)) { this.log.info("Detected latex-main-file '" + texFile + "'. "); latexMainFiles.add(texFile); } } /** * Deletes the files * created from the tex-file <code>texFile</code>, * if that is a latex main file. * <p> * Logging: * <ul> * <li> WPP02: tex file may be latex main file * <li> WFU01: Cannot read directory... * <li> WFU03: cannot close tex file * <li> EFU05: Failed to delete file * </ul> * * @param texFile * the tex-file of which the created files shall be deleted * if it is a latex main file. */ private void clearTargetTexIfLatexMain(File texFile) { // exclude files which are no latex main files // may log WFU03, WPP02 if (!isLatexMainFile(texFile)) { return; } this.log.info("Deleting targets of latex main file '" + texFile + "'. 
"); FileFilter filter = this.fileUtils.getFileFilter (texFile, this.settings.getPatternCreatedFromLatexMain()); // may log WFU01, EFU05 this.fileUtils.deleteX(texFile, filter); } /** * Detects files in the directory represented by <code>texNode</code> * and in subdirectories recursively: * <ul> * <li> * those which are in various graphic formats incompatible with LaTeX * are converted into formats which can be inputted or included directly * into a latex file. * <li> * returns the set of latex main files. * </ul> * <p> * Logging: * <ul> * <li> WFU03: cannot close * <li> WPP02: tex file may be latex main file * <li> WPP03: Skipped processing of files with suffixes ... * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if running graphic processors failed. * <li> EFU06: if moving a file fails. * </ul> * * @param dir * represents the tex source directory or a subdirectory. * @param node * a node associated with <code>dir</code>. * @return * the collection of latex main files. * @throws BuildFailureException * TEX01 invoking * {@link #processGraphicsSelectMain(File, DirNode, Collection, Collection)} */ // used in LatexProcessor.create() // and in LatexProcessor.processGraphics() only // where 'node' represents the tex source directory Collection<File> processGraphicsSelectMain(File dir, DirNode node) throws BuildFailureException { Collection<String> skipped = new TreeSet<String>(); Collection<File> latexMainFiles = new TreeSet<File>(); if (this.settings.getReadTexSrcProcDirRec()) { // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, // WEX04, WEX05, WFU03, WPP02, EFU06 processGraphicsSelectMainRec(dir, node, skipped, latexMainFiles); } else { // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, // WEX04, WEX05, WFU03, WPP02, EFU06 processGraphicsSelectMain (dir, node, skipped, latexMainFiles); } if (!skipped.isEmpty()) { this.log.warn("WPP03: Skipped processing of files with suffixes " + skipped + ". "); } return latexMainFiles; } /** * <p> * Logging: * <ul> * <li> WFU03: cannot close file * <li> EFU06: Cannot move file * <li> WPP02: tex file may be latex main file * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if applications for preprocessing graphic files failed. * </ul> * * @param dir * represents the tex source directory or a subdirectory. * @param node * a node associated with <code>dir</code>. * @param skipped * the collection of suffixes of files with handling skipped so far * because there is no handler. * FIXME: interesting for files without suffix or for hidden files. * @param latexMainFiles * the collection of latex main files found so far. * @throws BuildFailureException * TEX01 invoking * {@link LatexPreProcessor.SuffixHandler#procSrc(File, LatexPreProcessor)} * only for {@link LatexPreProcessor.SuffixHandler#fig}, * {@link LatexPreProcessor.SuffixHandler#gp} and * {@link LatexPreProcessor.SuffixHandler#mp} * because these invoke external programs. */ private void processGraphicsSelectMain(File dir, DirNode node, Collection<String> skipped, Collection<File> latexMainFiles) throws BuildFailureException { assert node.isValid();// i.e. 
node.regularFile != null // FIXME: processing of the various graphic files // may lead to overwrite // FIXME: processing of the latex main files // may lead to overwrite of graphic files or their targets File file; String suffix; SuffixHandler handler; Collection<File> latexMainFilesLocal = new TreeSet<File>(); Map<File, SuffixHandler> file2handler = new TreeMap<File, SuffixHandler>(); for (String fileName : node.getRegularFileNames()) { file = new File(dir, fileName); suffix = this.fileUtils.getSuffix(file); handler = SUFFIX2HANDLER.get(suffix); if (handler == null) { this.log.debug("Skipping processing of file '" + file + "'. "); // warning on skipped files even on hidden files. skipped.add(suffix); } else { // Either performs transformation now // or schedule for later (latex main files) // or do nothing if no targets like bib-files // or tex-files to be inputted. // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 // WFU03, WPP02 if (!file.isHidden()) { handler.scheduleProcSrc(file, file2handler, this, latexMainFilesLocal); } } } // for latexMainFiles.addAll(latexMainFilesLocal); // remove sources from file2handler.keySet() // if created by local latex main files FileFilter filter; for (File lmFile : latexMainFilesLocal) { filter = this.fileUtils.getFileFilter (lmFile, this.settings.getPatternCreatedFromLatexMain()); Iterator<File> iter = file2handler.keySet().iterator(); File src; while (iter.hasNext()) { src = iter.next(); if (filter.accept(src)) { // FIXME: maybe this is too much: // better just one warning per latex main file // or just suffixes, i.e. handlers this.log.warn("WPP04: Skip processing '" + src + "': interpreted as target of '" + lmFile + "'. "); iter.remove(); continue; } // Here, src is not overwritten processing lmFile // FIXME: to be checked, whether this is also true // for targets of src } } // Here process file, except tex (bib at least info) // with associated handler // FIXME: How to ensure, that nothing is overwritten? // NO: if a file is overwritten, then it is no source // and needs no processing for (Map.Entry<File, SuffixHandler> entry : file2handler.entrySet()) { // procSrc may throw BuildFailureException TEX01 // and may log WFU03, WPP02, // EEX01, EEX02, EEX03, WEX04, WEX05 and EFU06 entry.getValue().procSrc(entry.getKey(), this); } } /** * Like * {@link #processGraphicsSelectMainRec(File,DirNode,Collection,Collection)} * but with recursion to subdirectories. */ private void processGraphicsSelectMainRec(File dir, DirNode node, Collection<String> skipped, Collection<File> latexMainFiles) throws BuildFailureException { processGraphicsSelectMain(dir, node, skipped, latexMainFiles); // go on recursively with subdirectories for (Map.Entry<String, DirNode> entry : node.getSubdirs().entrySet()) { // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05, WPP03 // WFU03, WPP02, EFU06 processGraphicsSelectMainRec(new File(dir, entry.getKey()), entry.getValue(), skipped, latexMainFiles); } } /** * Deletes all created files * in the directory represented by <code>texDir</code> * tracing subdirectories recursively. * For details of deletions within a single directory * see {@link #clearCreated(File, DirNode)}. * <p> * Logging: * <ul> * <li> WPP02: tex file may be latex main file * <li> WFU01: Cannot read directory... * <li> WFU03: cannot close tex file * <li> EFU05: Failed to delete file * </ul> * * @param texDir * represents the tex source directory or a subdirectory. 
*/ // invoked in LatexProcessor.clearAll() only void clearCreated(File texDir) { clearCreated(texDir, new DirNode(texDir, this.fileUtils)); } /** * Deletes all created files * in the directory represented by <code>node</code>, recursively. * In each directory, the sub-directories are not deleted themselves * but cleaned recursively. * The other files are cleaned, i.e. * their targets are deleted in an ordering reverse to creation * proceeding in the following steps: * <ul> * <li> * First the targets of the latex main files are deleted, * whereas the targets of the graphic (source) files * are just scheduled for deletion. * For details see * {@link LatexPreProcessor.SuffixHandler#clearTarget(File, LatexPreProcessor, Map)} * {@link LatexPreProcessor.SuffixHandler#tex#clearTarget(File, LatexPreProcessor, Map)} . * FIXME: what about deletion of a graphic source file in this course? * <li> * Then the graphic source files scheduled are un-scheduled * if deleted by some latex main file. * <li> * Finally, the targets of the graphic souce files are deleted. * FIXME: what if this results in deletion of a graphic source file? * </ul> * Then the files with handler * If a file has a prefix without handler, * (see {@link SuffixHandler#getSuffix()}) it is ignored. * Else its target is cleared as described in * {@link SuffixHandler#clearTarget(File, LatexPreProcessor, Map)}. * <p> * Logging: * <ul> * <li> WPP02: tex file may be latex main file * <li> WFU01: Cannot read directory... * <li> WFU03: cannot close tex file * <li> EFU05: Failed to delete file * </ul> * * @param dir * represents the tex source directory or a subdirectory. * @param node * a node associated with <code>dir</code>. */ private void clearCreated(File dir, DirNode node) { assert dir.isDirectory(); File file; SuffixHandler handler; Map<File, SuffixHandler> file2handler = new TreeMap<File, SuffixHandler>(); for (String fileName : node.getRegularFileNames()) { file = new File(dir, fileName); handler = SUFFIX2HANDLER.get(this.fileUtils.getSuffix(file)); if (handler != null) { // either clear targets now or schedule for clearing // (in particular do nothing if no target) // may log WPP02, WFU01, WFU03, EFU05 handler.clearTarget(file, this, file2handler); } } // clear targets of all still existing files // which just scheduled for clearing for (Map.Entry<File,SuffixHandler> entry : file2handler.entrySet()) { file = entry.getKey(); if (file.exists()) { entry.getValue().clearTarget(file, this); } } for (Map.Entry<String,DirNode> entry : node.getSubdirs().entrySet()) { // may log WPP02, WFU01, WFU03, EFU05 clearCreated(new File(dir, entry.getKey()), entry.getValue()); } } // FIXME: suffix for tex files containing text and including pdf }
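LatexPreProcessor's central mechanism is the SUFFIX2HANDLER map built from the SuffixHandler enum: each file found under the tex source tree is dispatched to the handler registered for its suffix, and files with an unknown suffix are collected for the WPP03 warning. The sketch below is my own stripped-down illustration of that dispatch pattern, not plugin code; the handler names and actions are placeholders.

import java.io.File;
import java.util.Map;
import java.util.TreeMap;

public class SuffixDispatchSketch {

    // Each enum constant is a strategy keyed by the suffix it handles,
    // mirroring SuffixHandler.procSrc()/getSuffix().
    enum Handler {
        FIG(".fig") { void process(File f) { System.out.println("fig2dev " + f); } },
        GP(".gp")   { void process(File f) { System.out.println("gnuplot " + f); } },
        TEX(".tex") { void process(File f) { System.out.println("maybe latex main file " + f); } };

        private final String suffix;
        Handler(String suffix) { this.suffix = suffix; }
        abstract void process(File f);
        String suffix() { return suffix; }
    }

    private static final Map<String, Handler> SUFFIX2HANDLER = new TreeMap<String, Handler>();
    static {
        for (Handler h : Handler.values()) {
            SUFFIX2HANDLER.put(h.suffix(), h);
        }
    }

    public static void main(String[] args) {
        for (String name : new String[] {"a.fig", "b.gp", "c.tex", "d.unknown"}) {
            String suffix = name.substring(name.lastIndexOf('.'));
            Handler handler = SUFFIX2HANDLER.get(suffix);
            if (handler == null) {
                System.out.println("skipped " + name);  // corresponds to the WPP03 warning
            } else {
                handler.process(new File(name));
            }
        }
    }
}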
maven-latex-plugin/src/main/java/org/m2latex/core/LatexPreProcessor.java
/* * The akquinet maven-latex-plugin project * * Copyright (c) 2011 by akquinet tech@spree GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.m2latex.core; import java.io.File; import java.io.FileFilter; import java.util.Iterator; import java.util.Collection; import java.util.TreeSet; import java.util.Map; import java.util.TreeMap; /** * The latex pre-processor is for preprocessing graphic files * in formats which cannot be included directly into a latex-file * and in finding the latex main files * which is done in {@link #processGraphicsSelectMain(File, DirNode)} * and in clearing the created files from the latex source directory * in {@link #clearCreated(File)}. */ public class LatexPreProcessor extends AbstractLatexProcessor { /** * Maps the suffix to the according handler. * If the handler is <code>null</code>, there is no handler. */ private final static Map<String, SuffixHandler> SUFFIX2HANDLER = new TreeMap<String, SuffixHandler>(); static { for (SuffixHandler handler : SuffixHandler.values()) { SUFFIX2HANDLER.put(handler.getSuffix(), handler); } } // static // used in preprocessing only private final static String SUFFIX_TEX = ".tex"; // home-brewed ending to represent tex including postscript private final static String SUFFIX_PTX = ".ptx"; // the next two for preprocessing and in LatexDev only final static String SUFFIX_PDFTEX = ".pdf_tex"; final static String SUFFIX_EPSTEX = ".eps_tex"; // suffix for xfig private final static String SUFFIX_FIG = ".fig"; // suffix for svg private final static String SUFFIX_SVG = ".svg"; // suffix for gnuplot // FIXME: to be made configurable private final static String SUFFIX_GP = ".gp"; // suffix for metapost private final static String SUFFIX_MP = ".mp"; // from xxx.mp creates xxx1.mps, xxx.log and xxx.mpx private final static String SUFFIX_MPS = ".mps"; private final static String SUFFIX_MPX = ".mpx"; // just for message private final static String SUFFIX_JPG = ".jpg"; private final static String SUFFIX_PNG = ".png"; // just for silently skipping private final static String SUFFIX_BIB = ".bib"; // for latex main file creating html and for graphics. final static String SUFFIX_EPS = ".eps"; private final static String SUFFIX_XBB = ".xbb"; private final static String SUFFIX_BB = ".bb"; LatexPreProcessor(Settings settings, CommandExecutor executor, LogWrapper log, TexFileUtils fileUtils) { super(settings, executor, log, fileUtils); } // Formats that work with LaTeX (dvi mode, using dvips): // eps // Formats that work with LaTeX (dvi mode, using dvipdfm(x)): // pdf, png, jpeg, eps (the latter not taken into account) // eps-source files handled via package epstopdf: // seemingly automatically converted eps-->pdf during latex run // also there is a program epstopdf and epspdf // There is a lot of experiments to do!! 
// MISSING: pdf and eps // NOTE: graphics is typically only included via dvipdfm(x) // Formats that work with pdfLaTeX (pdf mode): // pdf, png, jpeg, jbig2 (the latter not taken into account) // LuaTeX can also read // jpeg 2000 (not taken into account) // // Seemingly, it makes sense to distinguish from pdfViaDvi-parameter: // if set, seemingly, pdf, pgn and jpg is includable only // creating .bb or .xbb. // mp: besides mpost we also have mptopdf creating pdf: // mptopdf 05someMetapost.mp creates 05someMetapost1.mps // mptopdf 05someMetapost1.mps creates 05someMetapost1-mps.pdf /** * Handler for each suffix of a source file. * Mostly, these represent graphic formats * but also {@link #SUFFIX_TEX} is required * to detect the latex main files * and {@link #SUFFIX_TEX} and {@link #SUFFIX_BIB} * are needed for proper cleaning of the tex souce directory. */ enum SuffixHandler { /** * Handler for .fig-files representing the native xfig format. */ fig { // converts a fig-file into pdf and ptx // invoking {@link #runFig2Dev(File, LatexDev)} // TEX01, EEX01, EEX02, EEX03, WEX04, WEX05 void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException { // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 proc.runFig2Dev(file); } void clearTarget(File file, LatexPreProcessor proc) { // may log EFU05 proc.clearTargetPtxPdfEps(file); } String getSuffix() { return LatexPreProcessor.SUFFIX_FIG; } }, /** * Handler for .gp-files representing the native gnuplot format. */ gp { // converts a gnuplot-file into pdf and ptx // invoking {@link #runGnuplot2Dev(File, LatexDev)} // TEX01, EEX01, EEX02, EEX03, WEX04, WEX05 void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException { proc.runGnuplot2Dev(file); } void clearTarget(File file, LatexPreProcessor proc) { // may log EFU05 proc.clearTargetPtxPdfEps(file); } String getSuffix() { return LatexPreProcessor.SUFFIX_GP; } }, /** * Handler for .mp-files representing the metapost format. */ mp { // converts a metapost-file into mps-format // invoking {@link #runMetapost2mps(File)} // TEX01, EEX01, EEX02, EEX03, WEX04, WEX05 void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException { proc.runMetapost2mps(file); } void clearTarget(File file, LatexPreProcessor proc) { // may log WFU01, EFU05 proc.clearTargetMp(file); } String getSuffix() { return LatexPreProcessor.SUFFIX_MP; } }, /** * Handler for .svg-files representing scaleable vector graphics. */ svg { // converts an svg-file into pdf and ptx // invoking {@link #runFig2Dev(File, LatexDev)} // TEX01, EEX01, EEX02, EEX03, WEX04, WEX05 // EFU06 if moving a file fails. void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException { proc.runSvg2Dev(file); // proc.log.info("Processing svg-file '" + file + // "' deferred to LaTeX run by need. "); // FIXME: this works for pdf but not for dvi: // even in the latter case, .pdf and .pdf_tex are created } void clearTarget(File file, LatexPreProcessor proc) { // may log EFU05 proc.clearTargetPtxPdfEps(file); } String getSuffix() { return LatexPreProcessor.SUFFIX_SVG; } }, /** * Handler for .jpg-files representing a format * definde by the Joint Photographic Experts Group (jp(e)g). */ jpg { void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException { proc.log.info("Jpg-file '" + file + "' needs no processing. "); // FIXME: this works for pdf but not for dvi: // in the latter case: // ! LaTeX Error: Cannot determine size of graphic ... 
// FIXME: only for dvi // proc.runEbb(file); } // void clearTarget(File file, // LatexPreProcessor proc, // Map<File, SuffixHandler> file2handler) { // // do not add to file2handler // } void clearTarget(File file, LatexPreProcessor proc) { // throw new IllegalStateException // ("File '" + file + "' has no targets to be cleared. "); proc.clearTargetJpgPng(file); } String getSuffix() { return LatexPreProcessor.SUFFIX_JPG; } }, /** * Handler for .png-files * representing the Portable Network Graphics format. */ png { void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException { proc.log.info("Png-file '" + file + "' needs no processing. "); // FIXME: this works for pdf but not for dvi: // in the latter case: // ! LaTeX Error: Cannot determine size of graphic ... // FIXME: only for dvi // proc.runEbb(file); } // void clearTarget(File file, // LatexPreProcessor proc, // Map<File, SuffixHandler> file2handler) { // // do not add to file2handler // } void clearTarget(File file, LatexPreProcessor proc) { // throw new IllegalStateException // ("File '" + file + "' has no targets to be cleared. "); proc.clearTargetJpgPng(file); } String getSuffix() { return LatexPreProcessor.SUFFIX_PNG; } }, /** * Handler for .tex-files * representing the TeX format, to be more precise the LaTeX format. */ tex { void scheduleProcSrc(File file, Map<File, SuffixHandler> file2handler, LatexPreProcessor proc, Collection<File> latexMainFiles) { file2handler.put(file, this);// super // may log WFU03, WPP02 proc.addIfLatexMain(file, latexMainFiles); } void procSrc(File file, LatexPreProcessor proc) { // do nothing: no source } void clearTarget(File file, LatexPreProcessor proc, Map<File, SuffixHandler> file2handler) { // may log WPP02, WFU01, WFU03, EFU05 proc.clearTargetTexIfLatexMain(file); } void clearTarget(File file, LatexPreProcessor proc) { throw new IllegalStateException ("Clearing targets of '" + file + "' should have been done already. "); } String getSuffix() { return LatexPreProcessor.SUFFIX_TEX; } }, /** * Handler for .bib-files * representing the BibTeX format for bibliographies. */ bib { void procSrc(File file, LatexPreProcessor proc) { proc.log.info("Found bibliography file '" + file + "'. "); } void clearTarget(File file, LatexPreProcessor proc, Map<File, SuffixHandler> file2handler) { // do not add to file2handler } void clearTarget(File file, LatexPreProcessor proc) { throw new IllegalStateException ("File '" + file + "' has no targets to be cleared. "); } String getSuffix() { return LatexPreProcessor.SUFFIX_BIB; } }; // essentially, maps file to its handler // overwritten for tex: in addition add to latexMainFiles void scheduleProcSrc(File file, Map<File, SuffixHandler> file2handler, LatexPreProcessor proc, Collection<File> latexMainFiles) { file2handler.put(file, this); } // FIXME: to be updated // if a graphic format: process source. // For tex and for bib: do nothing. /** * Typically, .i.e. for {@link #fig}-, {@link #gp}-, {@link #mp}- * and associates <code>file</code> * Does the transformation of the file <code>file</code> * using <code>proc</code> immediately, except for * <ul> * <li> * {@link #svg}-files for which an info message is logged, * that transformation is done by need in the course of a LaTeX run. * What occurs are files .pdf and .pdf_tex * even if {@link Settings#pdfViaDvi} indicates creation of dvi files. 
* <li> * {@link #tex}-files which are only scheduled for later translation * just by adding them to <code>latexMainFiles</code> * if they are latex main files, and ignored otherwise * (see {@link LatexPreProcessor#addIfLatexMain(File, Collection)}). * <li> * {@link #bib}-files for which just an info message * that a bib file was found is logged. * </ul> * <p> * Logging: * <ul> * <li> WFU03: cannot close * <li> WPP02: tex file may be latex main file * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if applications for preprocessing graphic files failed. * <li> EFU06: if moving a file fails. * </ul> * * @param file * a file with ending given by {@link #getSuffix()}. * @param proc * a latex pre-processor. * @throws BuildFailureException * TEX01 only for {@link #fig}, {@link #gp} and {@link #mp} * because these invoke external programs. */ abstract void procSrc(File file, LatexPreProcessor proc) throws BuildFailureException; /** * Typically, .i.e. for {@link #fig}-, {@link #gp}-, {@link #mp}- * and {@link #svg}-files just associates <code>file</code> * with this handler in <code>file2handler</code> * to schedule according targets for deletion except for * <ul> * <li> * {@link #tex}-files for which the target is cleared immediately * if it is a latex main file, otherwise ignoring * by invoking {@link #clearTargetTexIfLatexMain(File)}. * <li> * {@link #bib}-files * (maybe appropriate also for jpg-files and for png-files) * for which there are no targets * and so the association is not added to <code>file2handler</code>. * </ul> * <p> * Logging: * <ul> * <li> WPP02: tex file may be latex main file * <li> WFU01: Cannot read directory... * <li> WFU03: cannot close tex file * <li> EFU05: Failed to delete file * <ul> * * @param file * a file with ending given by {@link #getSuffix()}, * i.e. a file which can be handled by this handler. * @param proc * a latex pre-processor. * @param file2handler * maps <code>file</code> to its handler. * In general, this method adds * <code>file</code> to <code>file2handler</code> * together with its handler which is just <code>this</code>. * @see #clearTarget(File, LatexPreProcessor) */ // overwritten for tex, jpg, png and for bib // appropriate for svg although file may be removed from map later // used in clearCreated(File, DirNode) only void clearTarget(File file, LatexPreProcessor proc, Map<File, SuffixHandler> file2handler) { file2handler.put(file, this); } /** * Deletes the files potentially * created from the source file <code>file</code> * using <code>proc</code>. * <p> * Logging: * <ul> * <li> WFU01: Cannot read directory... * <li> EFU05: Failed to delete file * <ul> * * @param file * a file with ending given by {@link #getSuffix()}. * @param proc * a latex pre-processor. * @throws IllegalStateException * <ul> * <li> * if <code>file</code> has no targets to be deleted * as for jpg-files, png-files and bib-files. * <li> * if targets of <code>file</code> should have been cleared already * by {@link #clearTarget(File, LatexPreProcessor, Map)} * as for tex-files. * </ul> * @see #clearTarget(File, LatexPreProcessor, Map) */ // used in clearCreated(File, DirNode) only abstract void clearTarget(File file, LatexPreProcessor proc); /** * Returns the suffix of the file type * of the file type, this is the handler for. */ abstract String getSuffix(); } // enum SuffixHandler // FIXME: CAUTION with including pictures in xfig: // This is done as reference to included file. // Thus it breaks depencency chain. 
// The following shows the supported formats: // l.10 \includegraphics{02gp2pdf000} // % // I could not locate the file with any of these extensions: // .pdf,.PDF,.ai,.AI,.png,.PNG,.jpg,.JPG,.jpeg,.JPEG,.bmp,.BMP,.ps,.PS,.eps,.EPS,. // pz,.eps.Z,.ps.Z,.ps.gz,.eps.gz // Try typing <return> to proceed. // If that doesn't work, type X <return> to quit. // ) // :<- // Package srcltx Info: Expanded filename `03someGnuplot.ptx' to `03someGnuplot.pt // x.tex' on input line 949. // FIXME: allow variants: // - pdfhandler on .pdf,.PDF, (includable directly with pdflatex) // - png/jpghandler on .png,.PNG,.jpg,.JPG,.jpeg,.JPEG, // - maybe also for .fig // FIXME: questions: // - how to include .pdf into .dvi? // - how to include .eps into .pdf? // Question: how to transform ps into eps? // Research on the following: // .ai,.AI,.bmp,.BMP, // .ps,.PS,.eps,.EPS,. // pz,.eps.Z,.ps.Z,.ps.gz,.eps.gz // FIXME: decide whether suffix .ptx is replaced by .tex: // Advantage: because this is what it is. // Disadvantage: Requires mechanism // to determine whether tex is created or original // but this works the same as for pdf and for svg. /** * Converts the fig-file <code>figFile</code> * into a tex-file with ending ptx * including a pdf-file or an eps-file also created. * To that end, invokes {@link #runFig2DevInTex(File, LatexDev)} twice * to create a pdf-file and an eps-file * and invokes {@link #runFig2TexInclDev(File)} (once) * to create the tex-file. * <p> * Logging: * <ul> * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if running the fig2dev command failed. * </ul> * * @param figFile * the fig file to be processed. * @param dev * the 'device' which determines whether to create pdf or pstex. * @throws BuildFailureException * TEX01 if invocation of the fig2dev command * returned by {@link Settings#getFig2devCommand()} failed. * This is invoked twice: once for creating the pdf-file * and once for creating the pdf_t-file. * @see #processGraphicsSelectMain(File, DirNode) */ // used in fig.procSrc(File, LatexPreProcessor) only private void runFig2Dev(File figFile) throws BuildFailureException { this.log.info("Processing fig-file '" + figFile + "'. "); // all three // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 runFig2DevInTex(figFile, LatexDev.pdf); runFig2DevInTex(figFile, LatexDev.dvips); runFig2TexInclDev(figFile); } /** * From <code>figFile</code> create pdf/eps-file * containing graphics without text with special flag set. * The output format depends on <code>dev</code>. * The resulting file is included in some tex-file * created by {@link #runFig2TexInclDev(File)}. * Conversion is done by {@link Settings#getFig2devCommand()}. * <p> * Logging: FIXME: * EEX01, EEX02, EEX03, WEX04, WEX05 * * @param figFile * the fig-file to be processed * @param dev * represents the target: either a pdf-file or an eps-file. * @throws BuildFailureException * FIXME: TEX01, */ private void runFig2DevInTex(File figFile, LatexDev dev) throws BuildFailureException { // Result file: either .pdf or .eps File figInTexFile = this.fileUtils .replaceSuffix(figFile, dev.getGraphicsInTexSuffix()); String command = this.settings.getFig2devCommand(); //if (update(figFile, pdfFile)) { String[] args = buildArgumentsFig2PdfEps(dev.getXFigInTexLanguage(), this.settings.getFig2devGenOptions(), this.settings.getFig2devPdfEpsOptions(), figFile, figInTexFile); this.log.debug("Running " + command + " -L pdftex/pstex ... on '" + figFile.getName() + "'. 
"); // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 this.executor.execute(figFile.getParentFile(), this.settings.getTexPath(), //**** command, args, figInTexFile); //} } // // PSTEX Options: // -b width specify width of blank border around figure (1/72 inch) // Found: affects clipping path and bounding box only. // Not usable if latex text is used because parts no longer fit. // -F use correct font sizes (points instead of 1/80inch) // Found: no effect // -g color background color // No idea in which format color is given. // -n name set title part of PostScript output to name // Found: works. Without it is just the name of xxx.fig // // the strange thing is that this is only a subset of the postscript options // to be verified whether all options apply or not. // The EPS driver has the following differences from PostScript: // o No showpage is generated // because the output is meant to be imported // into another program or document and not printed // o The landscape/portrait options are ignored // o The centering option is ignored // o The multiple-page option is ignored // o The paper size option is ignored // o The x/y offset options are ignored // The EPS driver has the following two special options: // // -B 'Wx [Wy X0 Y0]' // This specifies that the bounding box of the EPS file // should have the width Wx and the height Wy. // Note that it doesn't scale the figure to this size, // it merely sets the bounding box. // If a value less than or equal to 0 is specified for Wx or Wy, // these are set to the width/height respectively of the figure. // Origin is relative to screen (0,0) (upper-left). // Wx, Wy, X0 and Y0 are interpreted // in centimeters or inches depending on the measure // given in the fig-file. // Remember to put either quotes (") or apostrophes (') // to group the arguments to -B. // -R 'Wx [Wy X0 Y0]' // Same as the -B option except that X0 and Y0 // is relative to the lower left corner of the figure. // Remember to put either quotes (") or apostrophes (') // to group the arguments to -R. // The PDF driver uses all the PostScript options. // Explanation: many of these options do not make sense. // Tried: -x, -y to shift: does not work and does not make sense // What makes sense is // -a don't output user's login name (anonymous) // Found: login name occurs nowhere with and without -a // -N convert all colors to grayscale // Found: works // No information on PDFTEX options. // Instead: // // PDF Options: // -a don't output user's login name (anonymous) // -b width specify width of blank border around figure (1/72 inch) // -F use correct font sizes (points instead of 1/80inch) // -g color background color // // seemingly not the same, so maybe separate options required. // -n is pstex but not in pdf, // -a is pdf but not pstex... strange: is postscript /** * Returns an array of options of the form * <code>-L &lt;language> &lt;optionsGen> &lt;optionsPdfEps> xxx.fig xxx.pdf/xxx.eps * </code> for invocation of {@link Settings#getFig2devCommand()} * for creation of the pdf/eps-part of a fig-figure * as done in {@link #runFig2DevInTex(File, LatexDev)}. * * @param language * is the output language * which is either <code>pdftex</code> or <code>pstex</code> * @param optionsGen * the general options, applying to both the pdf/eps part * and the tex part of the figure under consideration. * @param optionsPdfEps * the options, specific for the pdf/eps part (which is the same) * of the figure under consideration. 
* @param figFile * the fig-file to be transformed. * @param grpFile * the graphics file (pdf/eps-file) * which is the result of the transformation. */ private String[] buildArgumentsFig2PdfEps(String language, String optionsGen, String optionsPdfEps, File figFile, File grpFile) { String[] optionsGenArr = optionsGen .isEmpty() ? new String[0] : optionsGen .split(" "); String[] optionsPdfEpsArr = optionsPdfEps.isEmpty() ? new String[0] : optionsPdfEps.split(" "); int lenSum = optionsGenArr.length + optionsPdfEpsArr.length; // add the four additional options String[] args = new String[lenSum + 4]; // language args[0] = "-L"; args[1] = language; // general options System.arraycopy(optionsGenArr, 0, args, 2, optionsGenArr .length); // language specific options System.arraycopy(optionsPdfEpsArr, 0, args, 2+optionsGenArr.length, optionsPdfEpsArr.length); // input: fig-file args[2+lenSum] = figFile.getName(); // output: pdf/eps-file args[3+lenSum] = grpFile.getName(); return args; } /** * From <code>figFile</code> create tex-file * containing text with special flag set and * including a graphic file containing the rest of <code>figFile</code>. * Inclusion is without file extension and so both possible results * of {@link #runFig2DevInTex(File, LatexDev)} can be included * when compiling with latex. * Conversion is done by {@link Settings#getFig2devCommand()}. * <p> * Logging: FIXME: * warning EEX01, EEX02, EEX03, WEX04, WEX05 * * @param figFile * the fig-file to be processed * @throws BuildFailureException * FIXME: TEX01, */ private void runFig2TexInclDev(File figFile) throws BuildFailureException { // result file: .ptx File ptxFile = this.fileUtils.replaceSuffix(figFile, SUFFIX_PTX); String command = this.settings.getFig2devCommand(); //if (update(figFile, pdf_tFile)) { String[] args = buildArgumentsFig2Ptx(this.settings.getFig2devGenOptions(), this.settings.getFig2devPtxOptions(), figFile, ptxFile); this.log.debug("Running " + command + " -L (pdf/ps)tex_t... on '" + figFile.getName() + "'. "); // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 this.executor.execute(figFile.getParentFile(), this.settings.getTexPath(), //**** command, args, ptxFile); //} } /** * The name of the language * used by the {@link Settings#getFig2devCommand()} * to specify ``special'' text without graphic of an xfig-picture. * Note that the languages <code>pdftex_t</code> and <code>pstex_t</code> * are equivalent. */ private final static String XFIG_TEX_LANGUAGE = "pdftex_t"; // Since pstex_t is equivalent with pdftex_t, // also the options are the same (hopefully) // // PSTEX_T Options: // -b width specify width of blank border around figure (1/72 inch) // -E num set encoding for text translation (0 no translation, // 1 ISO-8859-1, 2 ISO-8859-2) // -F don't set font family/series/shape, so you can // set it from latex // -p name name of the PostScript file to be overlaid /** * Returns an array of options of the form * <code>-L &lt;language> &lt;optionsGen> &lt;optionsPdfEps> -p xxx xxx.fig xxx.ptx</code> for invocation of {@link Settings#getFig2devCommand()} * for creation of the tex-part of a fig-figure * as done in {@link #runFig2TexInclDev(File)}. * Note that the option <code>-p xxx</code> * specifies the name of the pdf/eps-file * included in the result file <code>ptxFile</code> * without suffix. * * @param language * is the output language * which is either <code>pdftex_t</code> or <code>pstex_t</code> * (which yield the same result). 
* @param optionsGen * the general options, applying to both the pdf/eps part * and the tex part of the figure under consideration. * @param optionsPtx * the options, specific for the tex part * of the figure under consideration (for the ptx-file). * @param figFile * the fig-file to be transformed. * @param ptxFile * the ptx-file which is the result of the transformation. */ private String[] buildArgumentsFig2Ptx(String optionsGen, String optionsPtx, File figFile, File ptxFile) { String[] optionsGenArr = optionsGen.isEmpty() ? new String[0] : optionsGen.split(" "); String[] optionsPtxArr = optionsPtx.isEmpty() ? new String[0] : optionsPtx.split(" "); int lenSum = optionsGenArr.length +optionsPtxArr.length; // add the six additional options String[] args = new String[lenSum + 6]; // language args[0] = "-L"; args[1] = XFIG_TEX_LANGUAGE; // general options System.arraycopy(optionsGenArr, 0, args, 2, optionsGenArr.length); // language specific options System.arraycopy(optionsPtxArr, 0, args, 2+optionsGenArr.length, optionsPtxArr.length); // -p pdf/eps-file name in ptx-file without suffix args[2+lenSum] = "-p"; // full path without suffix args[3+lenSum] = this.fileUtils.replaceSuffix(figFile, SUFFIX_VOID) .getName(); // input: fig-file args[4+lenSum] = figFile.getName(); // output: ptx-file args[5+lenSum] = ptxFile.getName(); return args; } /** * Deletes the files <code>xxx.ptx</code>, <code>xxx.pdf</code> and * <code>xxx.eps</code> * created from the graphic file <code>grpFile</code> * of the form <code>xxx.y</code>. * <p> * Logging: * EFU05: Failed to delete file * * @param grpFile * a graphic file. */ // for formats fig, gp and svg: since all of these create ptx, pdf and eps private void clearTargetPtxPdfEps(File grpFile) { this.log.info("Deleting targets of file '" + grpFile + "'. "); // may log EFU05 deleteIfExists(grpFile, SUFFIX_PTX); deleteIfExists(grpFile, LatexDev.pdf .getGraphicsInTexSuffix());// pdf deleteIfExists(grpFile, LatexDev.dvips.getGraphicsInTexSuffix());// eps } /** * Converts a gnuplot-file into a tex-file with ending ptx * including a pdf-file or an eps-file also created. * To that end, invokes {@link #runGnuplot2Dev(File, LatexDev)} twice * to create a pdf-file and an eps-file * and to create the tex-file which can include both. * <p> * Logging: * <ul> * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if running the ptx/pdf-conversion built-in in gnuplot fails. * </ul> * * @param gpFile * the gp-file (gnuplot format) to be converted to pdf and ptx. * @throws BuildFailureException * TEX01 if invocation of the ptx/pdf-conversion built-in * in gnuplot fails. * @see #processGraphicsSelectMain(File, DirNode) */ // used in gp.procSrc(File, LatexPreProcessor) only private void runGnuplot2Dev(File gpFile) throws BuildFailureException { this.log.info("Processing gnuplot-file '" + gpFile + "'. "); // both may throw BuildFailureException TEX01, // and may log EEX01, EEX02, EEX03, WEX04, WEX05 runGnuplot2Dev(gpFile, LatexDev.dvips); runGnuplot2Dev(gpFile, LatexDev.pdf); } // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 private void runGnuplot2Dev(File gpFile, LatexDev dev) throws BuildFailureException { String command = this.settings.getGnuplotCommand(); File grpFile = this.fileUtils.replaceSuffix (gpFile, dev.getGraphicsInTexSuffix()); File ptxFile = this.fileUtils.replaceSuffix(gpFile, SUFFIX_PTX); String[] args = new String[] { "-e", // run a command string "..." 
with commands sparated by ';' // "set terminal cairolatex " + dev.getGnuplotInTexLanguage() + " " + this.settings.getGnuplotOptions() + ";set output \"" + ptxFile.getName() + "\";load \"" + gpFile.getName() + "\"" }; // if (update(gpFile, ptxFile)) { this.log.debug("Running " + command + " -e... on '" + gpFile.getName() + "'. "); // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 this.executor.execute(gpFile.getParentFile(), //workingDir this.settings.getTexPath(), //**** command, args, grpFile, ptxFile); // } // no check: just warning that no output has been created. } /** * Runs mpost on mp-files to generate mps-files. * <p> * Logging: * <ul> * <li> WFU03: cannot close log file * <li> EAP01: Running <code>command</code> failed. For details... * <li> EAP02: Running <code>command</code> failed. No log file * <li> WAP04: if log file is not readable. * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if running the mpost command failed. * </ul> * * @param mpFile * the metapost file to be processed. * @throws BuildFailureException * TEX01 if invocation of the mpost command failed. * @see #processGraphicsSelectMain(File, DirNode) */ // used in mp.procSrc(File, LatexPreProcessor) only private void runMetapost2mps(File mpFile) throws BuildFailureException { this.log.info("Processing metapost-file '" + mpFile + "'. "); String command = this.settings.getMetapostCommand(); File workingDir = mpFile.getParentFile(); // for more information just type mpost --help String[] args = buildArguments(this.settings.getMetapostOptions(), mpFile); this.log.debug("Running " + command + " on '" + mpFile.getName() + "'. "); // FIXME: not check on all created files, // but this is not worse than with latex // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 this.executor.execute(workingDir, this.settings.getTexPath(), //**** command, args, this.fileUtils.replaceSuffix(mpFile, "1"+SUFFIX_MPS)); // from xxx.mp creates xxx1.mps, xxx.log and xxx.mpx // FIXME: what is xxx.mpx for? File logFile = this.fileUtils.replaceSuffix(mpFile, SUFFIX_LOG); // may log WFU03, EAP01, EAP02, WAP04 logErrs(logFile, command, this.settings.getPatternErrMPost()); // FIXME: what about warnings? } /** * Deletes the graphic files * created from the metapost-file <code>mpFile</code>. * <p> * Logging: * <ul> * <li> WFU01: Cannot read directory ... * <li> EFU05: Failed to delete file * </ul> * * @param mpFile * a metapost file. */ private void clearTargetMp(File mpFile) { this.log.info("Deleting targets of graphic-file '" + mpFile + "'. "); // may log EFU05 deleteIfExists(mpFile, SUFFIX_LOG); deleteIfExists(mpFile, SUFFIX_FLS); deleteIfExists(mpFile, SUFFIX_MPX); // delete files xxxNumber.mps String name1 = mpFile.getName(); final String root = name1.substring(0, name1.lastIndexOf(".")); FileFilter filter = new FileFilter() { public boolean accept(File file) { return !file.isDirectory() && file.getName().matches(root + "\\d+" + SUFFIX_MPS); } }; // may log WFU01, EFU05 this.fileUtils.deleteX(mpFile, filter); } /** * Converts an svg-file into a tex-file with ending ptx * including a pdf-file or an eps-file also created. * To that end, invokes {@link #runSvg2Dev(File, LatexDev, boolean)} twice * to create a pdf-file and an eps-file * and to create the tex-file which can include both. * <p> * Logging: * <ul> * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if running the ptx/pdf-conversion built-in in svg2dev fails. * <li> EFU06 if moving a file fails. 
* </ul> * * @param svgFile * the svg-file to be converted to a pdf-file and a ptx-file. * @throws BuildFailureException * TEX01 if invocation of the ptx/pdf-conversion built-in * in svg2dev fails. * @see #processGraphicsSelectMain(File, DirNode) */ // used in svg.procSrc(File, LatexPreProcessor) only private void runSvg2Dev(File svgFile) throws BuildFailureException { this.log.info("Processing svg-file '" + svgFile + "'. "); // both may throw BuildFailureException TEX01, // and may log EEX01, EEX02, EEX03, WEX04, WEX05 runSvg2Dev(svgFile, LatexDev.pdf, false); // FIXME: avoiding may be wrong runSvg2Dev(svgFile, LatexDev.dvips, true);// that way page=1 is avoided } // FIXME: still the included pdf/eps-file does not occur // with full path in ptx-file // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05, EFU06 private void runSvg2Dev(File svgFile, LatexDev dev, boolean renameTex) throws BuildFailureException { String command = this.settings.getSvg2devCommand(); // full path without suffix File grpFile = this.fileUtils.replaceSuffix(svgFile, SUFFIX_VOID); // FIXME: eliminate literal: comes from .pdf_tex and .eps_tex // dropping .pdf and .eps, respectively File texFile = this.fileUtils.replaceSuffix(svgFile, "_tex"); String[] args = buildNullArguments(this.settings.getSvg2devOptions(), svgFile); args[0] = dev.getSvgExportOption() + grpFile.getName(); this.log.debug("Running " + command + " on '" + svgFile.getName() + "'. "); // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 this.executor.execute(svgFile.getParentFile(), this.settings.getTexPath(), //**** command, args, grpFile, texFile); // rename grpFile and texFile // may log EFU06: cannot move file this.fileUtils.moveOrError(grpFile, this.fileUtils.replaceSuffix (svgFile, dev.getGraphicsInTexSuffix())); if (renameTex) { // may log EFU06: cannot move file this.fileUtils.moveOrError(texFile, this.fileUtils.replaceSuffix (svgFile, SUFFIX_PTX)); } } // Additional research: // Documentation says, that this is needed for interface eps, // but not for interface pdf. // Experiments show, that we can do without it in any case. private void runEbb(File file) throws BuildFailureException { String command = this.settings.getEbbCommand(); File workingDir = file.getParentFile(); String[] args = buildNullArguments(this.settings.getEbbOptions(), file); // Creation of .xbb files for driver dvipdfmx // FIXME: literal args[0] ="-x"; File resFile = this.fileUtils.replaceSuffix(file, SUFFIX_XBB); this.log.debug("Running " + command + " twice on '" + file.getName() + "'. "); // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 this.executor.execute(workingDir, this.settings.getTexPath(), //**** command, args, resFile); // Creation of .bb files for driver dvipdfm // FIXME: literal args[0] ="-m"; resFile = this.fileUtils.replaceSuffix(file, SUFFIX_BB); this.executor.execute(workingDir, this.settings.getTexPath(), //**** command, args, resFile); } /** * Returns an array of strings, * where the 0th entry is <code>null</code> * and a placeholder for option <code>-x</code> or <code>-m</code> * when used by {@link #runEbb(File)} * and for the export option * when used by {@link #runSvg2Dev(File, LatexDev, boolean)} * then follow the options from <code>options</code> * and finally comes the name of <code>file</code>. 
*/ protected static String[] buildNullArguments(String options, File file) { if (options.isEmpty()) { return new String[] {null, file.getName()}; } String[] optionsArr = options.split(" "); String[] args = new String[optionsArr.length+2]; System.arraycopy(optionsArr, 0, args, 1, optionsArr.length); args[args.length-1] = file.getName(); assert args[0] == null; return args; } /** * Deletes the graphic files * created from the svg-file <code>svgFile</code>. * <p> * Logging: * EFU05: Failed to delete file */ private void clearTargetJpgPng(File file) { this.log.info("Deleting targets of jpg/png-file '" + file + "'. "); // may log EFU05 deleteIfExists(file, SUFFIX_XBB); deleteIfExists(file, SUFFIX_BB); // deleteIfExists(svgFile, SUFFIX_PSTEX ); // deleteIfExists(file, SUFFIX_PDF ); // FIXME: this works for pdf but not for dvi: // even in the latter case, .pdf and .pdf_tex are created } /** * * <p> * Logging: * EFU05: Failed to delete file */ private void deleteIfExists(File file, String suffix) { File delFile = this.fileUtils.replaceSuffix(file, suffix); if (!delFile.exists()) { return; } // may log EFU05 this.fileUtils.deleteOrError(delFile); } /** * Returns whether <code>texFile</code> is a latex main file, * provided it is readable. * Otherwise logs a warning and returns <code>false</code>. * <p> * Logging: * <ul> * <li> WFU03: cannot close * <li> WPP02: tex file may be latex main file * <ul> * * @param texFile * the tex-file to decide on whether it is a latex main file. * @return * whether <code>texFile</code> is definitively a latex main file. * If this is not readable, <code>false</code>. */ // used // by addIfLatexMain(File, Collection) and // by clearTargetTexIfLatexMain(File) private boolean isLatexMainFile(File texFile) { assert texFile.exists(); // may log WFU03 cannot close Boolean res = this.fileUtils.matchInFile (texFile, this.settings.getPatternLatexMainFile()); if (res == null) { this.log.warn("WPP02: Cannot read tex file '" + texFile + "'; may bear latex main file. "); return false; } return res; } /** * If the tex-file <code>texFile</code> is a latex main file, * add it to <code>latexMainFiles</code>. * <p> * Logging: * <ul> * <li> WFU03: cannot close * <li> WPP02: tex file may be latex main file * <ul> * * @param texFile * the tex-file to be added to <code>latexMainFiles</code> * if it is a latex main file. * @param latexMainFiles * the collection of latex main files found so far. */ // invoked only by tex.procSrc(File, LatexPreProcessor) private void addIfLatexMain(File texFile, Collection<File> latexMainFiles) { // may log WFU03, WPP02 if (isLatexMainFile(texFile)) { this.log.info("Detected latex-main-file '" + texFile + "'. "); latexMainFiles.add(texFile); } } /** * Deletes the files * created from the tex-file <code>texFile</code>, * if that is a latex main file. * <p> * Logging: * <ul> * <li> WPP02: tex file may be latex main file * <li> WFU01: Cannot read directory... * <li> WFU03: cannot close tex file * <li> EFU05: Failed to delete file * </ul> * * @param texFile * the tex-file of which the created files shall be deleted * if it is a latex main file. */ private void clearTargetTexIfLatexMain(File texFile) { // exclude files which are no latex main files // may log WFU03, WPP02 if (!isLatexMainFile(texFile)) { return; } this.log.info("Deleting targets of latex main file '" + texFile + "'. 
"); FileFilter filter = this.fileUtils.getFileFilter (texFile, this.settings.getPatternCreatedFromLatexMain()); // may log WFU01, EFU05 this.fileUtils.deleteX(texFile, filter); } /** * Detects files in the directory represented by <code>texNode</code> * and in subdirectories recursively: * <ul> * <li> * those which are in various graphic formats incompatible with LaTeX * are converted into formats which can be inputted or included directly * into a latex file. * <li> * returns the set of latex main files. * </ul> * <p> * Logging: * <ul> * <li> WFU03: cannot close * <li> WPP02: tex file may be latex main file * <li> WPP03: Skipped processing of files with suffixes ... * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if running graphic processors failed. * <li> EFU06: if moving a file fails. * </ul> * * @param dir * represents the tex source directory or a subdirectory. * @param node * a node associated with <code>dir</code>. * @return * the collection of latex main files. * @throws BuildFailureException * TEX01 invoking * {@link #processGraphicsSelectMain(File, DirNode, Collection, Collection)} */ // used in LatexProcessor.create() // and in LatexProcessor.processGraphics() only // where 'node' represents the tex source directory Collection<File> processGraphicsSelectMain(File dir, DirNode node) throws BuildFailureException { Collection<String> skipped = new TreeSet<String>(); Collection<File> latexMainFiles = new TreeSet<File>(); if (this.settings.getReadTexSrcDirRec()) { // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, // WEX04, WEX05, WFU03, WPP02, EFU06 processGraphicsSelectMainRec(dir, node, skipped, latexMainFiles); } else { // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, // WEX04, WEX05, WFU03, WPP02, EFU06 processGraphicsSelectMain (dir, node, skipped, latexMainFiles); } if (!skipped.isEmpty()) { this.log.warn("WPP03: Skipped processing of files with suffixes " + skipped + ". "); } return latexMainFiles; } /** * <p> * Logging: * <ul> * <li> WFU03: cannot close file * <li> EFU06: Cannot move file * <li> WPP02: tex file may be latex main file * <li> EEX01, EEX02, EEX03, WEX04, WEX05: * if applications for preprocessing graphic files failed. * </ul> * * @param dir * represents the tex source directory or a subdirectory. * @param node * a node associated with <code>dir</code>. * @param skipped * the collection of suffixes of files with handling skipped so far * because there is no handler. * FIXME: interesting for files without suffix or for hidden files. * @param latexMainFiles * the collection of latex main files found so far. * @throws BuildFailureException * TEX01 invoking * {@link LatexPreProcessor.SuffixHandler#procSrc(File, LatexPreProcessor)} * only for {@link LatexPreProcessor.SuffixHandler#fig}, * {@link LatexPreProcessor.SuffixHandler#gp} and * {@link LatexPreProcessor.SuffixHandler#mp} * because these invoke external programs. */ private void processGraphicsSelectMain(File dir, DirNode node, Collection<String> skipped, Collection<File> latexMainFiles) throws BuildFailureException { assert node.isValid();// i.e. 
node.regularFile != null // FIXME: processing of the various graphic files // may lead to overwrite // FIXME: processing of the latex main files // may lead to overwrite of graphic files or their targets File file; String suffix; SuffixHandler handler; Collection<File> latexMainFilesLocal = new TreeSet<File>(); Map<File, SuffixHandler> file2handler = new TreeMap<File, SuffixHandler>(); for (String fileName : node.getRegularFileNames()) { file = new File(dir, fileName); suffix = this.fileUtils.getSuffix(file); handler = SUFFIX2HANDLER.get(suffix); if (handler == null) { this.log.debug("Skipping processing of file '" + file + "'. "); // warning on skipped files even on hidden files. skipped.add(suffix); } else { // Either performs transformation now // or schedule for later (latex main files) // or do nothing if no targets like bib-files // or tex-files to be inputted. // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05 // WFU03, WPP02 if (!file.isHidden()) { handler.scheduleProcSrc(file, file2handler, this, latexMainFilesLocal); } } } // for latexMainFiles.addAll(latexMainFilesLocal); // remove sources from file2handler.keySet() // if created by local latex main files FileFilter filter; for (File lmFile : latexMainFilesLocal) { filter = this.fileUtils.getFileFilter (lmFile, this.settings.getPatternCreatedFromLatexMain()); Iterator<File> iter = file2handler.keySet().iterator(); File src; while (iter.hasNext()) { src = iter.next(); if (filter.accept(src)) { // FIXME: maybe this is too much: // better just one warning per latex main file // or just suffixes, i.e. handlers this.log.warn("WPP04: Skip processing '" + src + "': interpreted as target of '" + lmFile + "'. "); iter.remove(); continue; } // Here, src is not overwritten processing lmFile // FIXME: to be checked, whether this is also true // for targets of src } } // Here process file, except tex (bib at least info) // with associated handler // FIXME: How to ensure, that nothing is overwritten? // NO: if a file is overwritten, then it is no source // and needs no processing for (Map.Entry<File, SuffixHandler> entry : file2handler.entrySet()) { // procSrc may throw BuildFailureException TEX01 // and may log WFU03, WPP02, // EEX01, EEX02, EEX03, WEX04, WEX05 and EFU06 entry.getValue().procSrc(entry.getKey(), this); } } /** * Like * {@link #processGraphicsSelectMainRec(File,DirNode,Collection,Collection)} * but with recursion to subdirectories. */ private void processGraphicsSelectMainRec(File dir, DirNode node, Collection<String> skipped, Collection<File> latexMainFiles) throws BuildFailureException { processGraphicsSelectMain(dir, node, skipped, latexMainFiles); // go on recursively with subdirectories for (Map.Entry<String, DirNode> entry : node.getSubdirs().entrySet()) { // may throw BuildFailureException TEX01, // may log EEX01, EEX02, EEX03, WEX04, WEX05, WPP03 // WFU03, WPP02, EFU06 processGraphicsSelectMainRec(new File(dir, entry.getKey()), entry.getValue(), skipped, latexMainFiles); } } /** * Deletes all created files * in the directory represented by <code>texDir</code> * tracing subdirectories recursively. * For details of deletions within a single directory * see {@link #clearCreated(File, DirNode)}. * <p> * Logging: * <ul> * <li> WPP02: tex file may be latex main file * <li> WFU01: Cannot read directory... * <li> WFU03: cannot close tex file * <li> EFU05: Failed to delete file * </ul> * * @param texDir * represents the tex source directory or a subdirectory. 
*/ // invoked in LatexProcessor.clearAll() only void clearCreated(File texDir) { clearCreated(texDir, new DirNode(texDir, this.fileUtils)); } /** * Deletes all created files * in the directory represented by <code>node</code>, recursively. * In each directory, the sub-directories are not deleted themselves * but cleaned recursively. * The other files are cleaned, i.e. * their targets are deleted in an ordering reverse to creation * proceeding in the following steps: * <ul> * <li> * First the targets of the latex main files are deleted, * whereas the targets of the graphic (source) files * are just scheduled for deletion. * For details see * {@link LatexPreProcessor.SuffixHandler#clearTarget(File, LatexPreProcessor, Map)} * {@link LatexPreProcessor.SuffixHandler#tex#clearTarget(File, LatexPreProcessor, Map)} . * FIXME: what about deletion of a graphic source file in this course? * <li> * Then the graphic source files scheduled are un-scheduled * if deleted by some latex main file. * <li> * Finally, the targets of the graphic souce files are deleted. * FIXME: what if this results in deletion of a graphic source file? * </ul> * Then the files with handler * If a file has a prefix without handler, * (see {@link SuffixHandler#getSuffix()}) it is ignored. * Else its target is cleared as described in * {@link SuffixHandler#clearTarget(File, LatexPreProcessor, Map)}. * <p> * Logging: * <ul> * <li> WPP02: tex file may be latex main file * <li> WFU01: Cannot read directory... * <li> WFU03: cannot close tex file * <li> EFU05: Failed to delete file * </ul> * * @param dir * represents the tex source directory or a subdirectory. * @param node * a node associated with <code>dir</code>. */ private void clearCreated(File dir, DirNode node) { assert dir.isDirectory(); File file; SuffixHandler handler; Map<File, SuffixHandler> file2handler = new TreeMap<File, SuffixHandler>(); for (String fileName : node.getRegularFileNames()) { file = new File(dir, fileName); handler = SUFFIX2HANDLER.get(this.fileUtils.getSuffix(file)); if (handler != null) { // either clear targets now or schedule for clearing // (in particular do nothing if no target) // may log WPP02, WFU01, WFU03, EFU05 handler.clearTarget(file, this, file2handler); } } // clear targets of all still existing files // which just scheduled for clearing for (Map.Entry<File,SuffixHandler> entry : file2handler.entrySet()) { file = entry.getKey(); if (file.exists()) { entry.getValue().clearTarget(file, this); } } for (Map.Entry<String,DirNode> entry : node.getSubdirs().entrySet()) { // may log WPP02, WFU01, WFU03, EFU05 clearCreated(new File(dir, entry.getKey()), entry.getValue()); } } // FIXME: suffix for tex files containing text and including pdf }
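The class above centres on one pattern: a static map from file suffix to an enum handler whose constants override the processing and cleanup methods. The following is a minimal, self-contained sketch of that dispatch idea, not the plugin's actual API; the names SuffixDispatchSketch, Handler and processDir are hypothetical stand-ins for LatexPreProcessor, SuffixHandler and processGraphicsSelectMain, and the handler bodies only print what the real code would do.

import java.io.File;
import java.util.Map;
import java.util.TreeMap;

// Minimal sketch of the suffix-to-handler dispatch used above.
// Handler and processDir are hypothetical names; only the pattern comes from the source.
public class SuffixDispatchSketch {

    enum Handler {
        FIG(".fig") { void procSrc(File f) { System.out.println("convert " + f + " via fig2dev"); } },
        GP (".gp")  { void procSrc(File f) { System.out.println("convert " + f + " via gnuplot"); } },
        TEX(".tex") { void procSrc(File f) { System.out.println("candidate latex main file: " + f); } };

        private final String suffix;
        Handler(String suffix) { this.suffix = suffix; }
        String getSuffix() { return suffix; }
        abstract void procSrc(File file);
    }

    // Maps each suffix to its handler, mirroring SUFFIX2HANDLER above.
    private static final Map<String, Handler> SUFFIX2HANDLER = new TreeMap<String, Handler>();
    static {
        for (Handler h : Handler.values()) {
            SUFFIX2HANDLER.put(h.getSuffix(), h);
        }
    }

    static void processDir(File dir) {
        File[] files = dir.listFiles();
        if (files == null) {
            return;
        }
        for (File file : files) {
            String name = file.getName();
            int dot = name.lastIndexOf('.');
            String suffix = dot < 0 ? "" : name.substring(dot);
            Handler handler = SUFFIX2HANDLER.get(suffix);
            if (handler == null) {
                System.out.println("skipping " + file); // no handler for this suffix
            } else {
                handler.procSrc(file);                  // dispatch on the suffix
            }
        }
    }

    public static void main(String[] args) {
        processDir(new File("."));
    }
}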
@new for selective build: - added parameter texSrcProcDirectory - modified meaning of parameter texSrcDirectory - renamed parameter readTexSrcDirRec-->readTexSrcProcDirRec to that end: - made choice whether texSrcProcDirectory or still texSrcDirectory is required. @doc corrected parameters and links.
maven-latex-plugin/src/main/java/org/m2latex/core/LatexPreProcessor.java
@new for selective build: - added parameter texSrcProcDirectory - modified meaning of parameter texSrcDirectory - renamed parameter readTexSrcDirRec-->readTexSrcProcDirRec to that end: - made choice whether texSrcProcDirectory or still texSrcDirectory is required. @doc corrected parameters and links.
<ide><path>aven-latex-plugin/src/main/java/org/m2latex/core/LatexPreProcessor.java <ide> * <ide> * @param figFile <ide> * the fig file to be processed. <del> * @param dev <del> * the 'device' which determines whether to create pdf or pstex. <ide> * @throws BuildFailureException <ide> * TEX01 if invocation of the fig2dev command <ide> * returned by {@link Settings#getFig2devCommand()} failed. <ide> * included in the result file <code>ptxFile</code> <ide> * without suffix. <ide> * <del> * @param language <del> * is the output language <del> * which is either <code>pdftex_t</code> or <code>pstex_t</code> <del> * (which yield the same result). <ide> * @param optionsGen <ide> * the general options, applying to both the pdf/eps part <ide> * and the tex part of the figure under consideration. <ide> // where 'node' represents the tex source directory <ide> Collection<File> processGraphicsSelectMain(File dir, DirNode node) <ide> throws BuildFailureException { <add> <ide> Collection<String> skipped = new TreeSet<String>(); <ide> Collection<File> latexMainFiles = new TreeSet<File>(); <del> if (this.settings.getReadTexSrcDirRec()) { <add> if (this.settings.getReadTexSrcProcDirRec()) { <ide> // may throw BuildFailureException TEX01, <ide> // may log EEX01, EEX02, EEX03, <ide> // WEX04, WEX05, WFU03, WPP02, EFU06
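The commit above renames the recursion switch readTexSrcDirRec to readTexSrcProcDirRec, which processGraphicsSelectMain consults to decide between flat and recursive traversal of the tex source directory. The sketch below only illustrates that toggle under assumptions: the process method and its arguments are invented for the example and are not the plugin's real signatures.

import java.io.File;

// Illustrative sketch of a boolean setting toggling flat vs. recursive processing,
// analogous to readTexSrcProcDirRec in the diff above. Names are hypothetical.
public class RecursionToggleSketch {

    static void process(File dir, boolean recurse) {
        File[] files = dir.listFiles();
        if (files == null) {
            return;
        }
        for (File file : files) {
            if (file.isDirectory()) {
                if (recurse) {
                    process(file, true); // descend only when the setting asks for it
                }
            } else {
                System.out.println("would preprocess " + file);
            }
        }
    }

    public static void main(String[] args) {
        File texSrcDir = new File(args.length > 0 ? args[0] : ".");
        boolean readTexSrcProcDirRec = true; // stand-in for the renamed setting
        process(texSrcDir, readTexSrcProcDirRec);
    }
}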
JavaScript
mit
8030fee798b8ef593f2377e7b5ae7a78f87ac1e3
0
96AA48/rooster.io,96AA48/rooster.io
//lookup.js //Getting local variables via the configuration file. var config = require('./configuration'); var school_id = config().school_id; //Getting first and third party modules var fs = require('fs'); var database = require('mongoskin').db('mongodb://' + config().database); function get(req, res, next, search) { var index = database.collection('index'); easter(search) ? req.easter = easter(search) : null; easter(search) ? search = easter(search).name : null; search = new RegExp(search, 'i'); index.find({$or : [{id : search}, {name : search}, {first_name : search}, {last_name : search}, {username: search}]}).toArray(function (err, database_entry) { if (err) console.warn(err); if (req.easter.type == 'RIP') { require('./auth').is(req, res, function () { res.render('schedule', req); }); } else if (database_entry.length == 1) { database_entry[0].url = make_url(req, database_entry[0]); req.match = database_entry[0]; next(); } else if (database_entry.length == 0) { require('./auth').is(req, res, function () { res.render('not_found', req); }); } else { req.match = database_entry; require('./auth').is(req, res, function () { res.render('list', req); }); } }); } function api(req, callback) { var index = database.collection('index'); var query = RegExp(req.query.name, 'i'); index.find({$or : [{id : query}, {name : query}, {first_name : query}, {last_name : query}, {username: query}, {group: query}]}).toArray(function (err, database_entry) { if (err) callback({'error': err}); else { for (entry of database_entry) {entry.url = make_url(req, entry)} callback({'data': database_entry}); } }); } function list(req, res, next, list) { var index = database.collection('index'); var query = RegExp(list, 'i'); index.find({group: list}).toArray(function (err, database_entry) { if (err) {req.error = err; next();} else { if (database_entry.length < 1) require('./auth').is(req, res, function () { res.render('not_found', req); }); req.match = database_entry; next(); } }); } function make_url(req, database_entry) { var url = 'http://roosters5.gepro-osi.nl/roosters/rooster.php?school=' + school_id + '&type=' + database_entry.type.charAt(0).toUpperCase() + database_entry.type.slice(1) + 'rooster'; switch (database_entry.type) { case 'leerling' : url += '&afdeling=' + database_entry.studentcategory + '&leerling=' + database_entry.id; break; case 'docent' : url += '&docenten=' + database_entry.name; break; case 'lokaal' : url += '&lokalen=' + database_entry.name; break; case 'klas' : url += '&klassen=' + database_entry.name; break; } if (req.query.tab) url += '&tabblad=' + req.query.tab return url; } function easter(search) { console.log('search', search); var list = JSON.parse(fs.readFileSync(__dirname + '/eastereggs.json')); for (entry of list) { if (entry.easter == search.toLowerCase()) return entry; } return null; } module.exports = {'get': get, 'api': api, 'list': list}; //Testing function, if test is passed in the command line will execute a test. if (process.argv[2] == "test") { console.log(easter('aardappel')); }
lookup.js
//lookup.js //Getting local variables via the configuration file. var config = require('./configuration'); var school_id = config().school_id; //Getting first and third party modules var fs = require('fs'); var database = require('mongoskin').db('mongodb://' + config().database); //Initialize the database connection. //Function for looking through the database and finding entries related to the searchterm. function get(req, res, next, search) { var index = database.collection('index'); //Initialize the database collection. easter(search) ? req.easter = easter(search) : null; //Bind the easter object to the request object. easter(search) ? search = easter(search).name : null; //Check if there are any eastereggs matching the search query. search = new RegExp(search, 'i'); //Make regular exeption for ignoring the case (Bram vs BRAM) should return the same. index.find({$or : [{id : search}, {name : search}, {first_name : search}, {last_name : search}, {username: search}]}).toArray(function (err, database_entry) { if (err) console.warn(err); if (req.easter.type == 'RIP') { //There is an easteregg type that is a grave stone for old students and teachers, here's the logical exception for it. require('./auth').is(req, res, function () { //Ask the authentication system if the user is authenticated (will happen several times in the module). res.render('schedule', req); }); } else if (database_entry.length == 1) { //If there was a match in the system with the supplied database_entry[0].url = make_url(req, database_entry[0]); req.match = database_entry[0]; next(); } else if (database_entry.length == 0) { //If there were no matches found. require('./auth').is(req, res, function () { res.render('not_found', req); //Render the not_found page. }); } else { //If there are multiple matches found in the system req.match = database_entry; require('./auth').is(req, res, function () { res.render('list', req); //Render the list view }); } }); } function api(req, callback) { var index = database.collection('index'); var query = RegExp(req.query.name, 'i'); index.find({$or : [{id : query}, {name : query}, {first_name : query}, {last_name : query}, {username: query}, {group: query}]}).toArray(function (err, database_entry) { if (err) callback({'error': err}); else { for (entry of database_entry) {entry.url = make_url(req, entry)} callback({'data': database_entry}); } }); } function list(req, res, next, list) { var index = database.collection('index'); var query = RegExp(list, 'i'); index.find({group: list}).toArray(function (err, database_entry) { if (err) {req.error = err; next();} else { if (database_entry.length < 1) require('./auth').is(req, res, function () { res.render('not_found', req); }); req.match = database_entry; next(); } }); } //Function for making a link out of the given database_entry. function make_url(req, database_entry) { var url = 'http://roosters5.gepro-osi.nl/roosters/rooster.php?school=' + school_id + '&type=' + database_entry.type.charAt(0).toUpperCase() + database_entry.type.slice(1) + 'rooster'; switch (database_entry.type) { case 'leerling' : url += '&afdeling=' + database_entry.studentcategory + '&leerling=' + database_entry.id; break; case 'docent' : url += '&docenten=' + database_entry.name; break; case 'lokaal' : url += '&lokalen=' + database_entry.name; break; case 'klas' : url += '&klassen=' + database_entry.name; break; } if (req.query.tab) url += '&tabblad=' + req.query.tab return url; } //Function for checking the given search query for eatereggs. 
//TODO: add a way to supply a template file for eastereggs. function easter(search) { console.log('search', search); var list = JSON.parse(fs.readFileSync(__dirname + '/eastereggs.json')); for (entry of list) { if (entry.easter == search.toLowerCase()) return entry; } return null; } module.exports = {'get': get, 'api': api, 'list': list}; //Testing function, if test is passed in the command line will execute a test. if (process.argv[2] == "test") { console.log(easter('aardappel')); }
Removed all comments from lookup.js
lookup.js
Removed all comments from lookup.js
<ide><path>ookup.js <ide> <ide> //Getting first and third party modules <ide> var fs = require('fs'); <del>var database = require('mongoskin').db('mongodb://' + config().database); //Initialize the database connection. <add>var database = require('mongoskin').db('mongodb://' + config().database); <ide> <del>//Function for looking through the database and finding entries related to the searchterm. <ide> function get(req, res, next, search) { <del> var index = database.collection('index'); //Initialize the database collection. <del> easter(search) ? req.easter = easter(search) : null; //Bind the easter object to the request object. <del> easter(search) ? search = easter(search).name : null; //Check if there are any eastereggs matching the search query. <del> search = new RegExp(search, 'i'); //Make regular exeption for ignoring the case (Bram vs BRAM) should return the same. <add> var index = database.collection('index'); <add> easter(search) ? req.easter = easter(search) : null; <add> easter(search) ? search = easter(search).name : null; <add> search = new RegExp(search, 'i'); <ide> <ide> index.find({$or : [{id : search}, {name : search}, {first_name : search}, {last_name : search}, {username: search}]}).toArray(function (err, database_entry) { <ide> if (err) console.warn(err); <ide> <del> if (req.easter.type == 'RIP') { //There is an easteregg type that is a grave stone for old students and teachers, here's the logical exception for it. <del> require('./auth').is(req, res, function () { //Ask the authentication system if the user is authenticated (will happen several times in the module). <add> if (req.easter.type == 'RIP') { <add> require('./auth').is(req, res, function () { <ide> res.render('schedule', req); <ide> }); <ide> } <del> else if (database_entry.length == 1) { //If there was a match in the system with the supplied <add> else if (database_entry.length == 1) { <ide> database_entry[0].url = make_url(req, database_entry[0]); <ide> req.match = database_entry[0]; <ide> next(); <ide> } <del> else if (database_entry.length == 0) { //If there were no matches found. <add> else if (database_entry.length == 0) { <ide> require('./auth').is(req, res, function () { <del> res.render('not_found', req); //Render the not_found page. <add> res.render('not_found', req); <ide> }); <ide> } <del> else { //If there are multiple matches found in the system <add> else { <ide> req.match = database_entry; <ide> require('./auth').is(req, res, function () { <del> res.render('list', req); //Render the list view <add> res.render('list', req); <ide> }); <ide> } <ide> }); <ide> }); <ide> } <ide> <del>//Function for making a link out of the given database_entry. <ide> function make_url(req, database_entry) { <ide> var url = 'http://roosters5.gepro-osi.nl/roosters/rooster.php?school=' + school_id + '&type=' + database_entry.type.charAt(0).toUpperCase() + database_entry.type.slice(1) + 'rooster'; <ide> <ide> return url; <ide> } <ide> <del>//Function for checking the given search query for eatereggs. <del>//TODO: add a way to supply a template file for eastereggs. <ide> function easter(search) { <ide> console.log('search', search); <ide> var list = JSON.parse(fs.readFileSync(__dirname + '/eastereggs.json'));
JavaScript
mit
567832c73542a9b5a5166c4c0a36fa1c3be78684
0
michaeljdennis/go-vue-todos,michaeljdennis/go-vue-todos,skadimoolam/go-vue-todos,skadimoolam/go-vue-todos,michaeljdennis/go-vue-todos,skadimoolam/go-vue-todos
(function(Vue){ "use strict"; new Vue({ el: 'body', data: { tasks: [], newTask: {} }, created: function() { this.$http.get('/tasks').then(function(res) { this.tasks = res.data.items ? res.data.items : []; }); }, methods: { createTask: function() { if (!$.trim(this.newTask.name)) { this.newTask = {}; return; }; this.newTask.done = "false"; this.$http.post('/task', this.newTask).success(function(res) { this.newTask.id = res.created; this.tasks.push(this.newTask); this.newTask = {}; }).error(function(err) { console.log(err); }); }, deleteTask: function(index) { this.$http.delete('/task/' + index).success(function(res) { this.$http.get('/tasks').then(function(res) { this.tasks = res.data.items ? res.data.items : []; }); }).error(function(err) { console.log(err); }); }, updateTask: function(task, completed) { if (completed) { task.done = "true"; } this.$http.put('/task', task).success(function(res) { this.$http.get('/tasks').then(function(res) { this.tasks = res.data.items ? res.data.items : []; }); }).error(function(err) { console.log(err); }); } } }); })(Vue);
public/app.js
(function(Vue){ "use strict"; new Vue({ el: 'body', data: { tasks: [], newTask: {} }, created: function() { this.$http.get('/tasks').then(function(res) { this.tasks = res.data.items ? res.data.items : []; }); }, methods: { createTask: function() { if (!$.trim(this.newTask.name)) { this.newTask = {}; return; }; this.newTask.done = "false"; this.$http.post('/task', this.newTask).success(function(res) { this.newTask.id = res.created; this.tasks.push(this.newTask); this.newTask = {}; }).error(function(err) { console.log(err) }); }, deleteTask: function(index) { this.$http.delete('/task/' + index).success(function(res) { this.$http.get('/tasks').then(function(res) { this.tasks = res.data.items ? res.data.items : []; }); }).error(function(err) { console.log(err) }); }, updateTask: function(task, completed) { if (completed) { task.done = "true" }; this.$http.put('/task', task).success(function(res) { this.$http.get('/tasks').then(function(res) { this.tasks = res.data.items ? res.data.items : []; }); }).error(function(err) { console.log(err) }); } } }) })(Vue);
Fix eslint errors
public/app.js
Fix eslint errors
<ide><path>ublic/app.js <ide> <ide> new Vue({ <ide> el: 'body', <del> <add> <ide> data: { <ide> tasks: [], <ide> newTask: {} <ide> this.tasks.push(this.newTask); <ide> <ide> this.newTask = {}; <del> }).error(function(err) { console.log(err) }); <add> }).error(function(err) { console.log(err); }); <ide> }, <ide> <ide> <ide> this.$http.get('/tasks').then(function(res) { <ide> this.tasks = res.data.items ? res.data.items : []; <ide> }); <del> }).error(function(err) { console.log(err) }); <add> }).error(function(err) { console.log(err); }); <ide> }, <ide> <ide> <ide> updateTask: function(task, completed) { <del> if (completed) { task.done = "true" }; <add> if (completed) { task.done = "true"; } <ide> <ide> this.$http.put('/task', task).success(function(res) { <ide> this.$http.get('/tasks').then(function(res) { <ide> this.tasks = res.data.items ? res.data.items : []; <ide> }); <del> }).error(function(err) { console.log(err) }); <add> }).error(function(err) { console.log(err); }); <ide> } <ide> } <del>}) <add>}); <ide> <ide> <ide> })(Vue);
Java
apache-2.0
6a38869ef2b8134c435f28cc454dac36f39e5614
0
RamblingWare/RamblingWare,RamblingWare/RamblingWare,RamblingWare/Rant,RamblingWare/Rant,RamblingWare/Rant,RamblingWare/RamblingWare
package org.oddox.action; import java.io.PrintWriter; import java.util.List; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts2.ServletActionContext; import org.apache.struts2.interceptor.ServletRequestAware; import org.apache.struts2.interceptor.ServletResponseAware; import org.oddox.config.Application; import org.oddox.config.Utils; import org.oddox.objects.Post; import com.opensymphony.xwork2.ActionSupport; /** * View RSS action class * * @author Austin Delamar * @date 12/9/2015 */ public class RssAction extends ActionSupport implements ServletResponseAware, ServletRequestAware { private static final long serialVersionUID = 1L; protected HttpServletResponse servletResponse; protected HttpServletRequest servletRequest; private static long cacheTime = 0l; private static List<Post> posts = null; /** * Returns RSS information. * * @return Action String */ public String execute() { // /rss // this page shows the XML RSS feed String response = "<?xml version=\"1.0\"?>" + "<rss version=\"2.0\">\n" + "<channel>\n" + "<title>RamblingWare Blog</title>\n" + "<description>This is my blog about computers, programming, tech, and things that bother me. I hope it bothers you too.</description>\n" + "<link>https://www.ramblingware.com</link>" + "<image>\n" + "<url>/img/logo-medium.png</url>" + "<title>RamblingWare</title>\n" + "<link>https://www.ramblingware.com/img/logo-medium.png</link>" + "</image>\n" + "<language>en-us</language>\n" + "<webMaster>[email protected]</webMaster>\n" + "<ttl>1440</ttl>\n" + "<skipDays><day>Saturday</day><day>Sunday</day></skipDays>\n" + "<skipHours><hour>0</hour><hour>1</hour><hour>2</hour><hour>3</hour><hour>4</hour><hour>5</hour><hour>6</hour><hour>7</hour>" + "<hour>17</hour><hour>18</hour><hour>19</hour><hour>20</hour><hour>21</hour><hour>22</hour><hour>23</hour></skipHours>\n" + "<copyright>RamblingWare 2017.</copyright>\n"; try { // Has it been 24 hours since fresh RSS check? long diff = Math.abs(System.currentTimeMillis() - cacheTime); if (diff >= 86400000) { // cache expired. // get fresh RSS data posts = Application.getDatabaseService() .getPosts(1, Application.getInt("default.limit"), false); // set new cacheTime cacheTime = System.currentTimeMillis(); } for (Post post : posts) { response += "<item><title>" + post.getTitle() + "</title>\n" + "<description>" + post.getDescription() + "</description>\n" + "<pubDate>" + post.getPublishDateReadable() + "</pubDate>\n" + "<link>https://www.ramblingware.com/blog/" + post.getUri() + "</link>" + "</item>\n"; } // add publish date from latest blog post if (posts != null && !posts.isEmpty()) { response += "<pubDate>" + posts.get(0) .getPublishDateReadable() + "</pubDate>\n"; } response += "<lastBuildDate>" + Utils.getDate() + "</lastBuildDate>\n"; response += "</channel>\n</rss>"; try { // return message to user PrintWriter out = ServletActionContext.getResponse() .getWriter(); ServletActionContext.getResponse() .setContentType("text/xml"); out.write(response); } catch (Exception e) { System.out.println("ERROR: Failed to build RSS feed. " + e.getMessage()); addActionError("Error: " + e.getClass() .getName() + ". " + e.getMessage()); } // no action return return null; } catch (Exception e) { System.out.println("ERROR: Failed to build RSS feed. " + e.getMessage()); addActionError("Error: " + e.getClass() .getName() + ". 
Please try again later."); return ERROR; } } @Override public void setServletResponse(HttpServletResponse servletResponse) { this.servletResponse = servletResponse; } @Override public void setServletRequest(HttpServletRequest servletRequest) { this.servletRequest = servletRequest; } public static long getCacheTime() { return cacheTime; } public static void setCacheTime(long cacheTime) { RssAction.cacheTime = cacheTime; } }
src/main/java/org/oddox/action/RssAction.java
package org.oddox.action; import java.io.PrintWriter; import java.util.List; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts2.ServletActionContext; import org.apache.struts2.interceptor.ServletRequestAware; import org.apache.struts2.interceptor.ServletResponseAware; import org.oddox.config.Application; import org.oddox.config.Utils; import org.oddox.objects.Post; import com.opensymphony.xwork2.ActionSupport; /** * View RSS action class * * @author Austin Delamar * @date 12/9/2015 */ public class RssAction extends ActionSupport implements ServletResponseAware, ServletRequestAware { private static final long serialVersionUID = 1L; protected HttpServletResponse servletResponse; protected HttpServletRequest servletRequest; private List<Post> posts = null; /** * Returns RSS information. * * @return Action String */ public String execute() { // /rss System.out.println("RSS Feed Requested."); // this page shows the RSS feed String response = "<?xml version=\"1.0\"?>" + "<rss version=\"2.0\">\n" + "<channel>\n" + "<title>RamblingWare Blog</title>\n" + "<description>This is my blog about computers, programming, tech, and things that bother me. I hope it bothers you too.</description>\n" + "<link>https://www.ramblingware.com</link>" + "<image>\n" + "<url>/img/logo-medium.png</url>" + "<title>RamblingWare</title>\n" + "<link>https://www.ramblingware.com/img/logo-medium.png</link>" + "</image>\n" + "<language>en-us</language>\n" + "<webMaster>[email protected]</webMaster>\n" + "<ttl>1440</ttl>\n" + "<skipDays><day>Saturday</day><day>Sunday</day></skipDays>\n" + "<skipHours><hour>0</hour><hour>1</hour><hour>2</hour><hour>3</hour><hour>4</hour><hour>5</hour><hour>6</hour><hour>7</hour>" + "<hour>17</hour><hour>18</hour><hour>19</hour><hour>20</hour><hour>21</hour><hour>22</hour><hour>23</hour></skipHours>\n" + "<copyright>RamblingWare 2017.</copyright>\n"; try { // gather posts posts = Application.getDatabaseService() .getPosts(1, Application.getInt("default.limit"), false); for (Post post : posts) { response += "<item><title>" + post.getTitle() + "</title>\n" + "<description>" + post.getDescription() + "</description>\n" + "<pubDate>" + post.getPublishDateReadable() + "</pubDate>\n" + "<link>https://www.ramblingware.com/blog/" + post.getUri() + "</link>" + "</item>\n"; } // add publish date from latest blog post if (posts != null && !posts.isEmpty()) { response += "<pubDate>" + posts.get(0) .getPublishDateReadable() + "</pubDate>\n"; } response += "<lastBuildDate>" + Utils.getDate() + "</lastBuildDate>\n"; response += "</channel>\n</rss>"; try { // return message to user PrintWriter out = ServletActionContext.getResponse() .getWriter(); ServletActionContext.getResponse() .setContentType("text/xml"); out.write(response); } catch (Exception e) { System.out.println("ERROR: Failed to build RSS feed. " + e.getMessage()); addActionError("Error: " + e.getClass() .getName() + ". " + e.getMessage()); } // no action return return NONE; } catch (Exception e) { System.out.println("ERROR: Failed to build RSS feed. " + e.getMessage()); addActionError("Error: " + e.getClass() .getName() + ". Please try again later."); return ERROR; } } @Override public void setServletResponse(HttpServletResponse servletResponse) { this.servletResponse = servletResponse; } @Override public void setServletRequest(HttpServletRequest servletRequest) { this.servletRequest = servletRequest; } }
Refactor RSS post calls to cache every 24 hours
src/main/java/org/oddox/action/RssAction.java
Refactor RSS post calls to cache every 24 hours
<ide><path>rc/main/java/org/oddox/action/RssAction.java <ide> private static final long serialVersionUID = 1L; <ide> protected HttpServletResponse servletResponse; <ide> protected HttpServletRequest servletRequest; <del> private List<Post> posts = null; <add> private static long cacheTime = 0l; <add> private static List<Post> posts = null; <ide> <ide> /** <ide> * Returns RSS information. <ide> public String execute() { <ide> <ide> // /rss <del> System.out.println("RSS Feed Requested."); <ide> <del> // this page shows the RSS feed <add> // this page shows the XML RSS feed <ide> String response = "<?xml version=\"1.0\"?>" + "<rss version=\"2.0\">\n" + "<channel>\n" <ide> + "<title>RamblingWare Blog</title>\n" <ide> + "<description>This is my blog about computers, programming, tech, and things that bother me. I hope it bothers you too.</description>\n" <ide> + "<hour>17</hour><hour>18</hour><hour>19</hour><hour>20</hour><hour>21</hour><hour>22</hour><hour>23</hour></skipHours>\n" <ide> + "<copyright>RamblingWare 2017.</copyright>\n"; <ide> try { <add> // Has it been 24 hours since fresh RSS check? <add> long diff = Math.abs(System.currentTimeMillis() - cacheTime); <add> if (diff >= 86400000) { <add> // cache expired. <add> // get fresh RSS data <add> posts = Application.getDatabaseService() <add> .getPosts(1, Application.getInt("default.limit"), false); <ide> <del> // gather posts <del> posts = Application.getDatabaseService() <del> .getPosts(1, Application.getInt("default.limit"), false); <add> // set new cacheTime <add> cacheTime = System.currentTimeMillis(); <add> } <add> <ide> for (Post post : posts) { <ide> response += "<item><title>" + post.getTitle() + "</title>\n" + "<description>" + post.getDescription() <ide> + "</description>\n" + "<pubDate>" + post.getPublishDateReadable() + "</pubDate>\n" <ide> .getName() + ". " + e.getMessage()); <ide> } <ide> // no action return <del> return NONE; <add> return null; <ide> <ide> } catch (Exception e) { <ide> System.out.println("ERROR: Failed to build RSS feed. " + e.getMessage()); <ide> public void setServletRequest(HttpServletRequest servletRequest) { <ide> this.servletRequest = servletRequest; <ide> } <add> <add> public static long getCacheTime() { <add> return cacheTime; <add> } <add> <add> public static void setCacheTime(long cacheTime) { <add> RssAction.cacheTime = cacheTime; <add> } <ide> }
Java
bsd-3-clause
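The change above swaps a per-request database call for a statically cached post list that is refreshed at most once every 24 hours (when more than 86400000 ms have elapsed since cacheTime). Below is a minimal, self-contained sketch of that expiry check; the TimedCache class and its Supplier-based loader are hypothetical illustrations, not part of the RamblingWare codebase.

import java.util.function.Supplier;

// Hypothetical helper illustrating the same time-based expiry check used in RssAction.execute().
public class TimedCache<T> {
    private final long ttlMillis;       // e.g. 86_400_000L for 24 hours
    private final Supplier<T> loader;   // stands in for the database call
    private long cacheTime = 0L;        // epoch millis of the last refresh
    private T value;

    public TimedCache(long ttlMillis, Supplier<T> loader) {
        this.ttlMillis = ttlMillis;
        this.loader = loader;
    }

    public synchronized T get() {
        long diff = Math.abs(System.currentTimeMillis() - cacheTime);
        if (value == null || diff >= ttlMillis) {
            value = loader.get();                  // cache expired: fetch fresh data
            cacheTime = System.currentTimeMillis();
        }
        return value;
    }

    public static void main(String[] args) {
        TimedCache<String> feed = new TimedCache<>(86_400_000L, () -> "fresh RSS payload");
        System.out.println(feed.get()); // first call loads via the supplier
        System.out.println(feed.get()); // second call is served from the cache
    }
}

Unlike this sketch, the static fields in RssAction are not synchronized, so two concurrent first requests could each trigger a database fetch; for a single low-traffic feed that is usually acceptable.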
2fe960f529eb25e3441b6ff8e994cc8fc71b3765
0
just6979/ScoreModel,just6979/ScoreIt_P10,just6979/ScoreIt_P10,just6979/ScoreModel
/* * Copyright (c) 2015 Justin White <[email protected]> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package net.justinwhite.score_model; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.*; @SuppressWarnings("FieldCanBeLocal") public class GameTest { private final Integer numPlayers; private final Player[] playersArray; private final String newPlayerName; private final String[] playerNames; private Game<Player> game; private Integer[] scores; { numPlayers = 4; playerNames = new String[]{"Lauren K", "Justin W", "Tim C", "Denise B"}; playersArray = new Player[numPlayers]; newPlayerName = "Foo Bar"; scores = new Integer[]{0, 0, 0, 0}; } @Before public void setUp() throws Exception { game = new Game<>(Player.class, numPlayers); assertEquals(numPlayers, game.getNumPlayers()); for (Integer i = 0; i < numPlayers; i++) { // populate array for comparisons later playersArray[i] = game.getPlayer(i); // rename players for comparisons later game.getPlayer(i).setName(playerNames[i]); } // check that scores start at 0 assertArrayEquals(scores, game.getScores().toArray()); // set scores for later tests scores = new Integer[]{50, 100, 200, 150}; for (Integer i = 0; i < scores.length; i++) { game.getPlayer(i).setScore(scores[i]); } // check the scores were set correctly assertArrayEquals(scores, game.getScores().toArray()); } @Test public void testConstructors() throws Exception { System.out.print("Testing " + Version.getVersion()); game = new Game<>(Player.class); assertEquals(Game.MIN_PLAYERS, game.getNumPlayers()); game = new Game<>(Player.class, Integer.MIN_VALUE); assertEquals(Game.MIN_PLAYERS, game.getNumPlayers()); game = new Game<>(Player.class, Integer.MAX_VALUE); assertEquals(Game.MAX_PLAYERS, game.getNumPlayers()); } @Test public void testToString() throws Exception { assertEquals(String.format("Game: %s\nUUID: %s\nPlayer count: %d\nPlayers: %s", game.getName(), game.getID(), game.getNumPlayers(), game.getPlayerList() ), game.toString() ); } @Test public void testSetName() throws Exception { String tempName = "Foo Game"; String builtName 
= Game.buildName(); assertEquals(builtName, game.getName()); game.setName(tempName); assertEquals(tempName, game.getName()); game.setName(""); assertEquals(builtName, game.getName()); } @Test public void testSetNumPlayersUp() throws Exception { // fail if too many players assertFalse(game.setNumPlayers(Integer.MAX_VALUE)); // increase player count by 1 Integer newNumPlayers = numPlayers + 1; // check decrease was successful assertTrue(game.setNumPlayers(newNumPlayers)); // and player count matches assertEquals(newNumPlayers, game.getNumPlayers()); // check last player's name to see if it matches the player count assertEquals("Player " + newNumPlayers, game.getPlayer(game.getNumPlayers() - 1).getName()); } @Test public void testSetNumPlayersDown() throws Exception { // fail if too few players assertFalse(game.setNumPlayers(0)); //decrease player count by 1 Integer newNumPlayers = numPlayers - 1; // check increase was successful and player count matches assertTrue(game.setNumPlayers(newNumPlayers)); assertEquals(newNumPlayers, game.getNumPlayers()); // check last player to see if it matches assertSame(playersArray[newNumPlayers - 1], game.getPlayer(game.getNumPlayers() - 1)); } @Test public void testCheckPlayer() throws Exception { assertTrue(game.checkPlayer(1)); assertFalse(game.checkPlayer(-1)); assertFalse(game.checkPlayer(Integer.MAX_VALUE)); assertFalse(game.checkPlayer(Integer.MIN_VALUE)); } @Test public void testGetPlayer() throws Exception { assertSame(playersArray[0], game.getPlayer(0)); assertNull(game.getPlayer(Integer.MAX_VALUE)); assertNull(game.getPlayer(Integer.MIN_VALUE)); } @Test public void testAddPlayer() throws Exception { // add 1 Player Player newPlayer = game.addPlayer(newPlayerName); assertSame(newPlayer, game.getPlayer(game.getNumPlayers() - 1)); // add Players to the max for (Integer i = game.getNumPlayers(); i < Game.MAX_PLAYERS; i++) { assertNotNull(game.addPlayer()); } // try to add one more and fail assertNull(game.addPlayer()); } @Test public void testRemovePlayer() throws Exception { // save the last player for reference Player lastPlayer = game.getPlayer(game.getNumPlayers() - 1); // remove 1 Player Player oldPlayer = game.removePlayer(); // check it's the same as the former last player assertSame(lastPlayer, oldPlayer); // check it's not the same as the new last player assertNotSame(oldPlayer, game.getPlayer(game.getNumPlayers() - 1)); // check last player's name to see if it matches assertEquals( playersArray[game.getNumPlayers() - 1].getName(), game.getPlayer(game.getNumPlayers() - 1).getName() ); // remove Players, from the beginning, to the minimum for (Integer i = game.getNumPlayers(); i > Game.MIN_PLAYERS; i--) { assertNotNull(game.removePlayer(0)); } // try to remove one more and fail assertNull(game.removePlayer()); } @Test public void testGetScores() throws Exception { } @Test public void testGetScoresText() throws Exception { String result = ""; for (Player p : game.getPlayerList()) { result += String.format("%s: %d Points; ", p.getName(), p.getScore()); } assertEquals(result, game.getScoresText()); } // TODO: test tie-scores @Test public void testCheckWinner() throws Exception { // check we have a winner assertTrue(game.checkWinner()); // check winner matches player set to win in setup assertSame(playersArray[2], game.getWinner()); // reset scores to 0 for (Integer i = 0; i < scores.length; i++) { game.getPlayer(i).setScore(0); } // check there is no winner assertTrue(game.checkWinner()); } @Test public void testGetWinner() throws Exception { } }
src/test/java/net/justinwhite/score_model/GameTest.java
/* * Copyright (c) 2015 Justin White <[email protected]> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * 3. Neither the name of the copyright holder nor the names of its * contributors may be used to endorse or promote products derived from this * software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ package net.justinwhite.score_model; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.*; @SuppressWarnings("FieldCanBeLocal") public class GameTest { private final Integer numPlayers; private final Player[] playersArray; private final String newPlayerName; private final String[] playerNames; private Game<Player> game; private Integer[] scores; { numPlayers = 4; playerNames = new String[]{"Lauren K", "Justin W", "Tim C", "Denise B"}; playersArray = new Player[numPlayers]; newPlayerName = "Foo Bar"; scores = new Integer[]{0, 0, 0, 0}; } @Before public void setUp() throws Exception { game = new Game<>(Player.class, numPlayers); assertEquals(numPlayers, game.getNumPlayers()); for (Integer i = 0; i < numPlayers; i++) { // populate array for comparisons later playersArray[i] = game.getPlayer(i); // rename players for comparisons later game.getPlayer(i).setName(playerNames[i]); } // check that scores start at 0 assertArrayEquals(scores, game.getScores()); // set scores for later tests scores = new Integer[]{50, 100, 200, 150}; for (Integer i = 0; i < scores.length; i++) { game.getPlayer(i).setScore(scores[i]); } // check the scores were set correctly assertArrayEquals(scores, game.getScores()); } @Test public void testGame() throws Exception { System.out.print("Testing " + Version.getVersion()); game = new Game<>(Player.class); assertEquals(Game.MIN_PLAYERS, game.getNumPlayers()); game = new Game<>(Player.class, Integer.MIN_VALUE); assertEquals(Game.MIN_PLAYERS, game.getNumPlayers()); game = new Game<>(Player.class, Integer.MAX_VALUE); assertEquals(Game.MAX_PLAYERS, game.getNumPlayers()); } @Test public void testToString() throws Exception { assertEquals(String.format("Game: %s\nUUID: %s\nPlayer count: %d\nPlayers: %s", game.getName(), game.getID(), game.getNumPlayers(), game.getPlayerList() ), game.toString() ); } @Test public void testSetNumPlayersUp() throws Exception { // fail if too many players 
assertFalse(game.setNumPlayers(Integer.MAX_VALUE)); // increase player count by 1 Integer newNumPlayers = numPlayers + 1; // check decrease was successful assertTrue(game.setNumPlayers(newNumPlayers)); // and player count matches assertEquals(newNumPlayers, game.getNumPlayers()); // check last player's name to see if it matches the player count assertEquals("Player " + newNumPlayers, game.getPlayer(game.getNumPlayers() - 1).getName()); } @Test public void testSetNumPlayersDown() throws Exception { // fail if too few players assertFalse(game.setNumPlayers(0)); //decrease player count by 1 Integer newNumPlayers = numPlayers - 1; // check increase was successful and player count matches assertTrue(game.setNumPlayers(newNumPlayers)); assertEquals(newNumPlayers, game.getNumPlayers()); // check last player to see if it matches assertSame(playersArray[newNumPlayers - 1], game.getPlayer(game.getNumPlayers() - 1)); } @Test public void testCheckPlayer() throws Exception { assertTrue(game.checkPlayer(1)); assertFalse(game.checkPlayer(Integer.MAX_VALUE)); assertFalse(game.checkPlayer(Integer.MIN_VALUE)); } @Test public void testGetPlayer() throws Exception { assertSame(playersArray[0], game.getPlayer(0)); assertNull(game.getPlayer(Integer.MAX_VALUE)); assertNull(game.getPlayer(Integer.MIN_VALUE)); } @Test public void testAddPlayer() throws Exception { // add 1 Player Player newPlayer = game.addPlayer(newPlayerName); assertSame(newPlayer, game.getPlayer(game.getNumPlayers() - 1)); // add Players to the max for (Integer i = game.getNumPlayers(); i < Game.MAX_PLAYERS; i++) { assertNotNull(game.addPlayer()); } // try to add one more and fail assertNull(game.addPlayer()); } @Test public void testRemovePlayer() throws Exception { // save the last player for reference Player lastPlayer = game.getPlayer(game.getNumPlayers() - 1); // remove 1 Player Player oldPlayer = game.removePlayer(); // check it's the same as the former last player assertSame(lastPlayer, oldPlayer); // check it's not the same as the new last player assertNotSame(oldPlayer, game.getPlayer(game.getNumPlayers() - 1)); // check last player's name to see if it matches assertEquals( playersArray[game.getNumPlayers() - 1].getName(), game.getPlayer(game.getNumPlayers() - 1).getName() ); // remove Players, from the beginning, to the minimum for (Integer i = game.getNumPlayers(); i > Game.MIN_PLAYERS; i--) { assertNotNull(game.removePlayer(0)); } // try to remove one more and fail assertNull(game.removePlayer()); } @Test public void testGetScoresText() throws Exception { String result = ""; for (Player p : game.getPlayerList()) { result += String.format("%s: %d Points; ", p.getName(), p.getScore()); } assertEquals(result, game.getScoresText()); } // TODO: test tie-scores @Test public void testCheckWinner() throws Exception { // check we have a winner assertTrue(game.checkWinner()); // check winner matches player set to win in setup assertSame(playersArray[2], game.getWinner()); // reset scores to 0 for (Integer i = 0; i < scores.length; i++) { game.getPlayer(i).setScore(0); } // check there is no winner assertTrue(game.checkWinner()); } }
Update for array-to-List, add more coverage.
src/test/java/net/justinwhite/score_model/GameTest.java
Update for array-to-List, add more coverage.
<ide><path>rc/test/java/net/justinwhite/score_model/GameTest.java <ide> game.getPlayer(i).setName(playerNames[i]); <ide> } <ide> // check that scores start at 0 <del> assertArrayEquals(scores, game.getScores()); <add> assertArrayEquals(scores, game.getScores().toArray()); <ide> // set scores for later tests <ide> scores = new Integer[]{50, 100, 200, 150}; <ide> for (Integer i = 0; i < scores.length; i++) { <ide> game.getPlayer(i).setScore(scores[i]); <ide> } <ide> // check the scores were set correctly <del> assertArrayEquals(scores, game.getScores()); <del> } <del> <del> @Test <del> public void testGame() throws Exception { <add> assertArrayEquals(scores, game.getScores().toArray()); <add> } <add> <add> @Test <add> public void testConstructors() throws Exception { <ide> System.out.print("Testing " + Version.getVersion()); <ide> game = new Game<>(Player.class); <ide> assertEquals(Game.MIN_PLAYERS, game.getNumPlayers()); <ide> @Test <ide> public void testToString() throws Exception { <ide> assertEquals(String.format("Game: %s\nUUID: %s\nPlayer count: %d\nPlayers: %s", <del> game.getName(), <del> game.getID(), <del> game.getNumPlayers(), <del> game.getPlayerList() <add> game.getName(), <add> game.getID(), <add> game.getNumPlayers(), <add> game.getPlayerList() <ide> ), game.toString() <ide> ); <ide> <add> } <add> <add> @Test <add> public void testSetName() throws Exception { <add> String tempName = "Foo Game"; <add> String builtName = Game.buildName(); <add> assertEquals(builtName, game.getName()); <add> game.setName(tempName); <add> assertEquals(tempName, game.getName()); <add> game.setName(""); <add> assertEquals(builtName, game.getName()); <ide> } <ide> <ide> @Test <ide> assertEquals("Player " + newNumPlayers, game.getPlayer(game.getNumPlayers() - 1).getName()); <ide> } <ide> <add> <ide> @Test <ide> public void testSetNumPlayersDown() throws Exception { <ide> // fail if too few players <ide> assertSame(playersArray[newNumPlayers - 1], game.getPlayer(game.getNumPlayers() - 1)); <ide> } <ide> <del> <ide> @Test <ide> public void testCheckPlayer() throws Exception { <ide> assertTrue(game.checkPlayer(1)); <add> assertFalse(game.checkPlayer(-1)); <ide> assertFalse(game.checkPlayer(Integer.MAX_VALUE)); <ide> assertFalse(game.checkPlayer(Integer.MIN_VALUE)); <ide> } <ide> } <ide> <ide> @Test <add> public void testGetScores() throws Exception { <add> <add> } <add> <add> @Test <ide> public void testGetScoresText() throws Exception { <ide> String result = ""; <ide> for (Player p : game.getPlayerList()) { <ide> assertTrue(game.checkWinner()); <ide> } <ide> <add> @Test <add> public void testGetWinner() throws Exception { <add> <add> } <add> <ide> }
Java
apache-2.0
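The test updates above follow an apparent API change in Game: getScores() now returns a List rather than an Integer[], so the expected array is compared against game.getScores().toArray(). A tiny standalone sketch of that comparison pattern follows; the class name and literal scores are illustrative only.

import java.util.Arrays;
import java.util.List;

public class ScoresComparisonSketch {
    public static void main(String[] args) {
        Integer[] expected = {50, 100, 200, 150};
        // Stands in for game.getScores(), which now returns a List instead of an array.
        List<Integer> scores = Arrays.asList(50, 100, 200, 150);

        // Mirrors assertArrayEquals(expected, game.getScores().toArray()) in the updated test.
        boolean equal = Arrays.equals(expected, scores.toArray());
        System.out.println("scores match: " + equal); // prints: scores match: true
    }
}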
a2c34607027302c2e842265955c5b49afc8859fe
0
md5555/android_packages_services_Telephony
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.services.telephony; import android.content.Context; import android.os.Handler; import android.os.Message; import android.telecomm.CallServiceAdapter; import android.util.Log; import com.android.internal.telephony.Call; import com.android.internal.telephony.CallStateException; import com.android.internal.telephony.Connection; import com.android.phone.PhoneGlobals; /** * Manages a single phone call. Listens to the call's state changes and updates the * CallServiceAdapter. */ class TelephonyCallConnection { private static final String TAG = TelephonyCallConnection.class.getSimpleName(); private static final int EVENT_PRECISE_CALL_STATE_CHANGED = 1; private final String mCallId; private final StateHandler mHandler = new StateHandler(); private CallServiceAdapter mCallServiceAdapter; private Connection mOriginalConnection; private Call.State mState = Call.State.IDLE; TelephonyCallConnection(CallServiceAdapter callServiceAdapter, String callId, Connection connection) { mCallServiceAdapter = callServiceAdapter; mCallId = callId; mOriginalConnection = connection; mOriginalConnection.getCall().getPhone().registerForPreciseCallStateChanged(mHandler, EVENT_PRECISE_CALL_STATE_CHANGED, null); updateState(); } String getCallId() { return mCallId; } Connection getOriginalConnection() { return mOriginalConnection; } void disconnect(boolean shouldAbort) { if (shouldAbort) { mCallServiceAdapter = null; close(); } if (mOriginalConnection != null) { try { mOriginalConnection.hangup(); } catch (CallStateException e) { Log.e(TAG, "Call to Connection.hangup failed with exception", e); } } } void hold() { if (Call.State.ACTIVE == mState) { try { // TODO: This doesn't handle multiple calls across call services yet mOriginalConnection.getCall().getPhone().switchHoldingAndActive(); } catch (CallStateException e) { Log.e(TAG, "Exception occurred while trying to put call on hold.", e); } } else { Log.e(TAG, "Cannot put a call that is not currently active on hold."); } } void unhold() { if (Call.State.HOLDING == mState) { try { // TODO: This doesn't handle multiple calls across call services yet mOriginalConnection.getCall().getPhone().switchHoldingAndActive(); } catch (CallStateException e) { Log.e(TAG, "Exception occurred while trying to release call from hold.", e); } } else { Log.e(TAG, "Cannot release a call that is not already on hold from hold."); } } private void updateState() { if (mOriginalConnection == null || mCallServiceAdapter == null) { return; } Call.State newState = mOriginalConnection.getState(); if (mState == newState) { return; } mState = newState; switch (newState) { case IDLE: break; case ACTIVE: mCallServiceAdapter.setActive(mCallId); break; case HOLDING: mCallServiceAdapter.setOnHold(mCallId); break; case DIALING: case ALERTING: mCallServiceAdapter.setDialing(mCallId); break; case INCOMING: case WAITING: mCallServiceAdapter.setRinging(mCallId); break; case DISCONNECTED: 
mCallServiceAdapter.setDisconnected(mCallId); close(); break; case DISCONNECTING: break; } } private void close() { if (mOriginalConnection != null) { Call call = mOriginalConnection.getCall(); if (call != null) { call.getPhone().unregisterForPreciseCallStateChanged(mHandler); } mOriginalConnection = null; } CallRegistrar.unregister(mCallId); } private class StateHandler extends Handler { @Override public void handleMessage(Message msg) { switch (msg.what) { case EVENT_PRECISE_CALL_STATE_CHANGED: updateState(); break; } } } }
src/com/android/services/telephony/TelephonyCallConnection.java
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.services.telephony; import android.content.Context; import android.media.AudioManager; import android.os.Handler; import android.os.Message; import android.telecomm.CallServiceAdapter; import android.util.Log; import com.android.internal.telephony.Call; import com.android.internal.telephony.CallStateException; import com.android.internal.telephony.Connection; import com.android.phone.PhoneGlobals; /** * Manages a single phone call. Listens to the call's state changes and updates the * CallServiceAdapter. */ class TelephonyCallConnection { private static final String TAG = TelephonyCallConnection.class.getSimpleName(); private static final int EVENT_PRECISE_CALL_STATE_CHANGED = 1; private final String mCallId; private final StateHandler mHandler = new StateHandler(); private CallServiceAdapter mCallServiceAdapter; private Connection mOriginalConnection; private Call.State mState = Call.State.IDLE; TelephonyCallConnection(CallServiceAdapter callServiceAdapter, String callId, Connection connection) { mCallServiceAdapter = callServiceAdapter; mCallId = callId; mOriginalConnection = connection; mOriginalConnection.getCall().getPhone().registerForPreciseCallStateChanged(mHandler, EVENT_PRECISE_CALL_STATE_CHANGED, null); updateState(); } String getCallId() { return mCallId; } Connection getOriginalConnection() { return mOriginalConnection; } void disconnect(boolean shouldAbort) { if (shouldAbort) { mCallServiceAdapter = null; close(); } if (mOriginalConnection != null) { try { mOriginalConnection.hangup(); } catch (CallStateException e) { Log.e(TAG, "Call to Connection.hangup failed with exception", e); } } } void hold() { if (Call.State.ACTIVE == mState) { try { // TODO: This doesn't handle multiple calls across call services yet mOriginalConnection.getCall().getPhone().switchHoldingAndActive(); } catch (CallStateException e) { Log.e(TAG, "Exception occurred while trying to put call on hold.", e); } } else { Log.e(TAG, "Cannot put a call that is not currently active on hold."); } } void unhold() { if (Call.State.HOLDING == mState) { try { // TODO: This doesn't handle multiple calls across call services yet mOriginalConnection.getCall().getPhone().switchHoldingAndActive(); } catch (CallStateException e) { Log.e(TAG, "Exception occurred while trying to release call from hold.", e); } } else { Log.e(TAG, "Cannot release a call that is not already on hold from hold."); } } private void updateState() { if (mOriginalConnection == null || mCallServiceAdapter == null) { return; } Call.State newState = mOriginalConnection.getState(); if (mState == newState) { return; } mState = newState; switch (newState) { case IDLE: break; case ACTIVE: mCallServiceAdapter.setActive(mCallId); break; case HOLDING: mCallServiceAdapter.setOnHold(mCallId); break; case DIALING: case ALERTING: mCallServiceAdapter.setDialing(mCallId); break; case INCOMING: case WAITING: mCallServiceAdapter.setRinging(mCallId); break; 
case DISCONNECTED: mCallServiceAdapter.setDisconnected(mCallId); close(); break; case DISCONNECTING: break; } setAudioMode(Call.State.ACTIVE); } private void close() { if (mOriginalConnection != null) { Call call = mOriginalConnection.getCall(); if (call != null) { call.getPhone().unregisterForPreciseCallStateChanged(mHandler); } mOriginalConnection = null; } CallRegistrar.unregister(mCallId); } /** * Sets the audio mode according to the specified state of the call. * TODO(santoscordon): This will not be necessary once Telecomm manages audio focus. This does * not handle multiple calls well, specifically when there are multiple active call services * within services/Telephony. * * @param state The state of the call. */ private static void setAudioMode(Call.State state) { Context context = PhoneGlobals.getInstance(); AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); if (Call.State.ACTIVE == state) { // Set the IN_CALL mode only when the call is active. if (audioManager.getMode() != AudioManager.MODE_IN_CALL) { audioManager.requestAudioFocusForCall( AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT); audioManager.setMode(AudioManager.MODE_IN_CALL); } } else { // Non active calls go back to normal mode. This breaks down if there are multiple calls // due to non-deterministic execution order across call services. But that will be fixed // as soon as this moves to Telecomm where it is aware of all active calls. if (audioManager.getMode() != AudioManager.MODE_NORMAL) { audioManager.abandonAudioFocusForCall(); } } } private class StateHandler extends Handler { @Override public void handleMessage(Message msg) { switch (msg.what) { case EVENT_PRECISE_CALL_STATE_CHANGED: updateState(); break; } } } }
Remove audio mode/focus code from telephony. No longer needed now that audio mode and focus are handled by telecomm. Change-Id: Ia311ddc638febbe676d74dd359de1809d5ce92ee
src/com/android/services/telephony/TelephonyCallConnection.java
Remove audio mode/focus code from telephony.
<ide><path>rc/com/android/services/telephony/TelephonyCallConnection.java <ide> package com.android.services.telephony; <ide> <ide> import android.content.Context; <del>import android.media.AudioManager; <ide> import android.os.Handler; <ide> import android.os.Message; <ide> import android.telecomm.CallServiceAdapter; <ide> case DISCONNECTING: <ide> break; <ide> } <del> <del> setAudioMode(Call.State.ACTIVE); <ide> } <ide> <ide> private void close() { <ide> CallRegistrar.unregister(mCallId); <ide> } <ide> <del> /** <del> * Sets the audio mode according to the specified state of the call. <del> * TODO(santoscordon): This will not be necessary once Telecomm manages audio focus. This does <del> * not handle multiple calls well, specifically when there are multiple active call services <del> * within services/Telephony. <del> * <del> * @param state The state of the call. <del> */ <del> private static void setAudioMode(Call.State state) { <del> Context context = PhoneGlobals.getInstance(); <del> AudioManager audioManager = <del> (AudioManager) context.getSystemService(Context.AUDIO_SERVICE); <del> <del> if (Call.State.ACTIVE == state) { <del> // Set the IN_CALL mode only when the call is active. <del> if (audioManager.getMode() != AudioManager.MODE_IN_CALL) { <del> audioManager.requestAudioFocusForCall( <del> AudioManager.STREAM_VOICE_CALL, AudioManager.AUDIOFOCUS_GAIN_TRANSIENT); <del> audioManager.setMode(AudioManager.MODE_IN_CALL); <del> } <del> } else { <del> // Non active calls go back to normal mode. This breaks down if there are multiple calls <del> // due to non-deterministic execution order across call services. But that will be fixed <del> // as soon as this moves to Telecomm where it is aware of all active calls. <del> if (audioManager.getMode() != AudioManager.MODE_NORMAL) { <del> audioManager.abandonAudioFocusForCall(); <del> } <del> } <del> } <del> <ide> private class StateHandler extends Handler { <ide> @Override <ide> public void handleMessage(Message msg) {
Java
apache-2.0
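With the audio mode/focus branch deleted, updateState() in TelephonyCallConnection is reduced to remembering the last reported Call.State and forwarding only real transitions to the CallServiceAdapter. The sketch below illustrates that change-detection-and-dispatch pattern in isolation; the State enum and Adapter interface are simplified stand-ins for the telephony types, not the actual framework classes.

// Simplified stand-ins for Call.State and CallServiceAdapter, for illustration only.
public class StateForwardingSketch {
    enum State { IDLE, DIALING, ACTIVE, HOLDING, DISCONNECTED }

    interface Adapter {
        void setDialing(String callId);
        void setActive(String callId);
        void setOnHold(String callId);
        void setDisconnected(String callId);
    }

    private State lastState = State.IDLE;

    void updateState(String callId, State newState, Adapter adapter) {
        if (newState == lastState) {
            return; // no transition: skip redundant adapter callbacks
        }
        lastState = newState;
        switch (newState) {
            case DIALING:      adapter.setDialing(callId); break;
            case ACTIVE:       adapter.setActive(callId); break;
            case HOLDING:      adapter.setOnHold(callId); break;
            case DISCONNECTED: adapter.setDisconnected(callId); break;
            default:           break; // IDLE needs no callback
        }
    }

    public static void main(String[] args) {
        StateForwardingSketch sketch = new StateForwardingSketch();
        Adapter logger = new Adapter() {
            public void setDialing(String id)      { System.out.println(id + " -> dialing"); }
            public void setActive(String id)       { System.out.println(id + " -> active"); }
            public void setOnHold(String id)       { System.out.println(id + " -> on hold"); }
            public void setDisconnected(String id) { System.out.println(id + " -> disconnected"); }
        };
        sketch.updateState("call-1", State.DIALING, logger);
        sketch.updateState("call-1", State.DIALING, logger); // duplicate state: suppressed
        sketch.updateState("call-1", State.ACTIVE, logger);
    }
}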
dda0cca6a6679c55ca400688ddbe76f006c7c3c0
0
sonamuthu/rice-1,UniversityOfHawaiiORS/rice,jwillia/kc-rice1,bhutchinson/rice,cniesen/rice,ewestfal/rice-svn2git-test,cniesen/rice,jwillia/kc-rice1,rojlarge/rice-kc,sonamuthu/rice-1,cniesen/rice,bhutchinson/rice,gathreya/rice-kc,bsmith83/rice-1,UniversityOfHawaiiORS/rice,ewestfal/rice,smith750/rice,ewestfal/rice,jwillia/kc-rice1,cniesen/rice,bhutchinson/rice,geothomasp/kualico-rice-kc,rojlarge/rice-kc,kuali/kc-rice,smith750/rice,rojlarge/rice-kc,kuali/kc-rice,UniversityOfHawaiiORS/rice,gathreya/rice-kc,bhutchinson/rice,rojlarge/rice-kc,gathreya/rice-kc,ewestfal/rice-svn2git-test,smith750/rice,kuali/kc-rice,jwillia/kc-rice1,jwillia/kc-rice1,gathreya/rice-kc,ewestfal/rice,smith750/rice,UniversityOfHawaiiORS/rice,ewestfal/rice,kuali/kc-rice,sonamuthu/rice-1,UniversityOfHawaiiORS/rice,gathreya/rice-kc,shahess/rice,smith750/rice,rojlarge/rice-kc,sonamuthu/rice-1,kuali/kc-rice,geothomasp/kualico-rice-kc,bsmith83/rice-1,ewestfal/rice-svn2git-test,cniesen/rice,bsmith83/rice-1,ewestfal/rice,shahess/rice,ewestfal/rice-svn2git-test,geothomasp/kualico-rice-kc,shahess/rice,shahess/rice,geothomasp/kualico-rice-kc,shahess/rice,bsmith83/rice-1,geothomasp/kualico-rice-kc,bhutchinson/rice
/* * Copyright 2005-2006 The Kuali Foundation. * * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kew.dto; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.kuali.rice.core.reflect.DataDefinition; import org.kuali.rice.core.reflect.ObjectDefinition; import org.kuali.rice.core.reflect.PropertyDefinition; import org.kuali.rice.core.resourceloader.GlobalResourceLoader; import org.kuali.rice.kew.actionitem.ActionItem; import org.kuali.rice.kew.actionrequest.ActionRequestFactory; import org.kuali.rice.kew.actionrequest.ActionRequestValue; import org.kuali.rice.kew.actions.AdHocRevoke; import org.kuali.rice.kew.actions.MovePoint; import org.kuali.rice.kew.actions.ValidActions; import org.kuali.rice.kew.actiontaken.ActionTakenValue; import org.kuali.rice.kew.definition.AttributeDefinition; import org.kuali.rice.kew.docsearch.DocSearchCriteriaDTO; import org.kuali.rice.kew.docsearch.DocSearchUtils; import org.kuali.rice.kew.docsearch.DocumentSearchContext; import org.kuali.rice.kew.docsearch.DocumentSearchResult; import org.kuali.rice.kew.docsearch.DocumentSearchResultComponents; import org.kuali.rice.kew.docsearch.SearchableAttribute; import org.kuali.rice.kew.docsearch.web.SearchAttributeFormContainer; import org.kuali.rice.kew.docsearch.xml.GenericXMLSearchableAttribute; import org.kuali.rice.kew.doctype.bo.DocumentType; import org.kuali.rice.kew.engine.CompatUtils; import org.kuali.rice.kew.engine.node.BranchState; import org.kuali.rice.kew.engine.node.KeyValuePair; import org.kuali.rice.kew.engine.node.Process; import org.kuali.rice.kew.engine.node.RouteNode; import org.kuali.rice.kew.engine.node.RouteNodeInstance; import org.kuali.rice.kew.engine.node.State; import org.kuali.rice.kew.engine.simulation.SimulationActionToTake; import org.kuali.rice.kew.engine.simulation.SimulationCriteria; import org.kuali.rice.kew.exception.DocumentTypeNotFoundException; import org.kuali.rice.kew.exception.KEWUserNotFoundException; import org.kuali.rice.kew.exception.WorkflowException; import org.kuali.rice.kew.exception.WorkflowRuntimeException; import org.kuali.rice.kew.lookupable.Column; import org.kuali.rice.kew.notes.Note; import org.kuali.rice.kew.notes.service.NoteService; import org.kuali.rice.kew.postprocessor.ActionTakenEvent; import org.kuali.rice.kew.postprocessor.AfterProcessEvent; import org.kuali.rice.kew.postprocessor.BeforeProcessEvent; import org.kuali.rice.kew.postprocessor.DeleteEvent; import org.kuali.rice.kew.postprocessor.DocumentRouteLevelChange; import org.kuali.rice.kew.postprocessor.DocumentRouteStatusChange; import org.kuali.rice.kew.routeheader.DocumentContent; import 
org.kuali.rice.kew.routeheader.DocumentRouteHeaderValue; import org.kuali.rice.kew.routeheader.StandardDocumentContent; import org.kuali.rice.kew.rule.RuleBaseValues; import org.kuali.rice.kew.rule.RuleDelegation; import org.kuali.rice.kew.rule.RuleExtension; import org.kuali.rice.kew.rule.RuleExtensionValue; import org.kuali.rice.kew.rule.RuleResponsibility; import org.kuali.rice.kew.rule.WorkflowAttribute; import org.kuali.rice.kew.rule.WorkflowAttributeValidationError; import org.kuali.rice.kew.rule.WorkflowAttributeXmlValidator; import org.kuali.rice.kew.rule.bo.RuleAttribute; import org.kuali.rice.kew.rule.xmlrouting.GenericXMLRuleAttribute; import org.kuali.rice.kew.service.KEWServiceLocator; import org.kuali.rice.kew.user.AuthenticationUserId; import org.kuali.rice.kew.user.EmplId; import org.kuali.rice.kew.user.Recipient; import org.kuali.rice.kew.user.RoleRecipient; import org.kuali.rice.kew.user.UserId; import org.kuali.rice.kew.user.UuId; import org.kuali.rice.kew.user.WorkflowUser; import org.kuali.rice.kew.user.WorkflowUserId; import org.kuali.rice.kew.util.KEWConstants; import org.kuali.rice.kew.util.ResponsibleParty; import org.kuali.rice.kew.util.Utilities; import org.kuali.rice.kew.util.XmlHelper; import org.kuali.rice.kew.web.KeyValueSort; import org.kuali.rice.kew.workgroup.GroupId; import org.kuali.rice.kew.workgroup.GroupNameId; import org.kuali.rice.kew.workgroup.WorkflowGroupId; import org.kuali.rice.kew.workgroup.Workgroup; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; /** * Translates Workflow server side beans into client side VO beans. * * @author Kuali Rice Team ([email protected]) */ public class DTOConverter { private static final Logger LOG = Logger.getLogger(DTOConverter.class); public static RouteHeaderDTO convertRouteHeader(DocumentRouteHeaderValue routeHeader, WorkflowUser user) throws WorkflowException, KEWUserNotFoundException { RouteHeaderDTO routeHeaderVO = new RouteHeaderDTO(); if (routeHeader == null) { return null; } populateRouteHeaderVO(routeHeaderVO, routeHeader); if (user != null) { routeHeaderVO.setUserBlanketApprover(false); // default to false if (routeHeader.getDocumentType() != null) { routeHeaderVO.setUserBlanketApprover(routeHeader.getDocumentType().isUserBlanketApprover(user)); } String topActionRequested = KEWConstants.ACTION_REQUEST_FYI_REQ; for (Iterator iter = routeHeader.getActionRequests().iterator(); iter.hasNext();) { ActionRequestValue actionRequest = (ActionRequestValue) iter.next(); // below will control what buttons are drawn on the client we only want the // heaviest action button to show on the client making this code a little combersome if (actionRequest.isRecipientRoutedRequest(user) && actionRequest.isActive()) { int actionRequestComparison = ActionRequestValue.compareActionCode(actionRequest.getActionRequested(), topActionRequested); if (actionRequest.isFYIRequest() && actionRequestComparison >= 0) { routeHeaderVO.setFyiRequested(true); } else if (actionRequest.isAcknowledgeRequest() && actionRequestComparison >= 0) { routeHeaderVO.setAckRequested(true); routeHeaderVO.setFyiRequested(false); topActionRequested = actionRequest.getActionRequested(); } else if (actionRequest.isApproveRequest() && actionRequestComparison >= 0) { routeHeaderVO.setApproveRequested(true); routeHeaderVO.setAckRequested(false); routeHeaderVO.setFyiRequested(false); topActionRequested = actionRequest.getActionRequested(); if (actionRequest.isCompleteRequst()) { routeHeaderVO.setCompleteRequested(true); } } 
} } // Update notes and notesToDelete arrays in routeHeaderVO routeHeaderVO.setNotesToDelete(null); routeHeaderVO.setNotes(convertNotesArrayListToNoteVOArray(routeHeader.getNotes())); } if (user != null) { routeHeaderVO.setValidActions(convertValidActions(KEWServiceLocator.getActionRegistry().getValidActions(user, routeHeader))); } return routeHeaderVO; } public static RouteHeaderDTO convertActionListRouteHeader(DocumentRouteHeaderValue routeHeader, WorkflowUser user) throws WorkflowException, KEWUserNotFoundException { RouteHeaderDTO routeHeaderVO = new RouteHeaderDTO(); if (routeHeader == null) { return null; } populateRouteHeaderVO(routeHeaderVO, routeHeader); if (user != null) { routeHeaderVO.setUserBlanketApprover(false); // default to false if (routeHeader.getDocumentType() != null) { routeHeaderVO.setUserBlanketApprover(routeHeader.getDocumentType().isUserBlanketApprover(user)); } String topActionRequested = KEWConstants.ACTION_REQUEST_FYI_REQ; for (Iterator iter = routeHeader.getActionRequests().iterator(); iter.hasNext();) { ActionRequestValue actionRequest = (ActionRequestValue) iter.next(); // below will control what buttons are drawn on the client we only want the // heaviest action button to show on the client making this code a little combersome if (actionRequest.isRecipientRoutedRequest(user) && actionRequest.isActive()) { int actionRequestComparison = ActionRequestValue.compareActionCode(actionRequest.getActionRequested(), topActionRequested); if (actionRequest.isFYIRequest() && actionRequestComparison >= 0) { routeHeaderVO.setFyiRequested(true); } else if (actionRequest.isAcknowledgeRequest() && actionRequestComparison >= 0) { routeHeaderVO.setAckRequested(true); routeHeaderVO.setFyiRequested(false); topActionRequested = actionRequest.getActionRequested(); } else if (actionRequest.isApproveRequest() && actionRequestComparison >= 0) { routeHeaderVO.setApproveRequested(true); routeHeaderVO.setAckRequested(false); routeHeaderVO.setFyiRequested(false); topActionRequested = actionRequest.getActionRequested(); if (actionRequest.isCompleteRequst()) { routeHeaderVO.setCompleteRequested(true); } } } } } routeHeaderVO.setValidActions(convertValidActions(KEWServiceLocator.getActionRegistry().getValidActions(user, routeHeader))); return routeHeaderVO; } public static ValidActionsDTO convertValidActions(ValidActions validActions) { ValidActionsDTO validActionsVO = new ValidActionsDTO(); for (Iterator iter = validActions.getActionTakenCodes().iterator(); iter.hasNext();) { String actionTakenCode = (String) iter.next(); validActionsVO.addValidActionsAllowed(actionTakenCode); } return validActionsVO; } private static void populateRouteHeaderVO(RouteHeaderDTO routeHeaderVO, DocumentRouteHeaderValue routeHeader) throws WorkflowException { routeHeaderVO.setRouteHeaderId(routeHeader.getRouteHeaderId()); routeHeaderVO.setAppDocId(routeHeader.getAppDocId()); routeHeaderVO.setDateApproved(Utilities.convertTimestamp(routeHeader.getApprovedDate())); routeHeaderVO.setDateCreated(Utilities.convertTimestamp(routeHeader.getCreateDate())); routeHeaderVO.setDateFinalized(Utilities.convertTimestamp(routeHeader.getFinalizedDate())); routeHeaderVO.setDateLastModified(Utilities.convertTimestamp(routeHeader.getStatusModDate())); /** * This is the original code which set everything up for lazy loading of document content */ // by default, a non-initialized document content object will be sent so that it can be fetched lazily // DocumentContentVO documentContentVO = new DocumentContentVO(); // 
documentContentVO.setRouteHeaderId(routeHeader.getRouteHeaderId()); // routeHeaderVO.setDocumentContent(documentContentVO); /** * Since we removed the lazy loading in the 2.3 release, this is the code which bypasses lazy loading */ // routeHeaderVO.setDocumentContent(convertDocumentContent(routeHeader.getDocContent(), // routeHeader.getRouteHeaderId())); routeHeaderVO.setDocRouteLevel(routeHeader.getDocRouteLevel()); routeHeaderVO.setCurrentRouteNodeNames(routeHeader.getCurrentRouteLevelName()); /* * Collection activeNodes = * SpringServiceLocator.getRouteNodeService().getActiveNodeInstances(routeHeaderVO.getRouteHeaderId()); * routeHeaderVO.setNodeNames(new String[activeNodes.size()]); int index = 0; for (Iterator iterator = * activeNodes.iterator(); iterator.hasNext();) { RouteNodeInstance nodeInstance = (RouteNodeInstance) * iterator.next(); routeHeaderVO.getNodeNames()[index++] = nodeInstance.getRouteNode().getRouteNodeName(); } */ routeHeaderVO.setDocRouteStatus(routeHeader.getDocRouteStatus()); routeHeaderVO.setDocTitle(routeHeader.getDocTitle()); if (routeHeader.getDocumentType() != null) { routeHeaderVO.setDocTypeName(routeHeader.getDocumentType().getName()); routeHeaderVO.setDocumentUrl(routeHeader.getDocumentType().getDocHandlerUrl()); routeHeaderVO.setDocTypeId(routeHeader.getDocumentTypeId()); } routeHeaderVO.setDocVersion(routeHeader.getDocVersion()); routeHeaderVO.setInitiator(convertUser(routeHeader.getInitiatorUser())); routeHeaderVO.setRoutedByUser(convertUser(routeHeader.getRoutedByUser())); /* populate the routeHeaderVO with the document variables */ // FIXME: we assume there is only one for now RouteNodeInstance routeNodeInstance = (RouteNodeInstance) routeHeader.getInitialRouteNodeInstance(0); // Ok, we are using the "branch state" as the arbitrary convenient repository for flow/process/edoc variables // so we need to stuff them into the VO if (routeNodeInstance.getBranch() != null) { List listOfBranchStates = routeNodeInstance.getBranch().getBranchState(); Iterator it = listOfBranchStates.iterator(); while (it.hasNext()) { BranchState bs = (BranchState) it.next(); if (bs.getKey() != null && bs.getKey().startsWith(BranchState.VARIABLE_PREFIX)) { LOG.debug("Setting branch state variable on vo: " + bs.getKey() + "=" + bs.getValue()); routeHeaderVO.setVariable(bs.getKey().substring(BranchState.VARIABLE_PREFIX.length()), bs.getValue()); } } } } public static DocumentRouteHeaderValue convertRouteHeaderVO(RouteHeaderDTO routeHeaderVO) throws WorkflowException, KEWUserNotFoundException { DocumentRouteHeaderValue routeHeader = new DocumentRouteHeaderValue(); routeHeader.setAppDocId(routeHeaderVO.getAppDocId()); routeHeader.setApprovedDate(Utilities.convertCalendar(routeHeaderVO.getDateApproved())); routeHeader.setCreateDate(Utilities.convertCalendar(routeHeaderVO.getDateCreated())); // String updatedDocumentContent = buildUpdatedDocumentContent(routeHeaderVO); // if null is returned from this method it indicates that the document content on the route header // contained no changes, since we are creating a new document here, we will default the // document content approriately if no changes are detected on the incoming DocumentContentVO // if (updatedDocumentContent != null) { // routeHeader.setDocContent(updatedDocumentContent); // } else { // routeHeader.setDocContent(KEWConstants.DEFAULT_DOCUMENT_CONTENT); // } if (StringUtils.isEmpty(routeHeader.getDocContent())) { routeHeader.setDocContent(KEWConstants.DEFAULT_DOCUMENT_CONTENT); } 
routeHeader.setDocRouteLevel(routeHeaderVO.getDocRouteLevel()); routeHeader.setDocRouteStatus(routeHeaderVO.getDocRouteStatus()); routeHeader.setDocTitle(routeHeaderVO.getDocTitle()); if (routeHeaderVO.getDocTypeName() != null) { DocumentType documentType = KEWServiceLocator.getDocumentTypeService().findByName(routeHeaderVO.getDocTypeName()); if (documentType == null) { throw new DocumentTypeNotFoundException("Could not locate the given document type name: " + routeHeaderVO.getDocTypeName()); } routeHeader.setDocumentTypeId(documentType.getDocumentTypeId()); } routeHeader.setDocVersion(routeHeaderVO.getDocVersion()); routeHeader.setFinalizedDate(Utilities.convertCalendar(routeHeaderVO.getDateFinalized())); if (routeHeaderVO.getInitiator() != null) { routeHeader.setInitiatorWorkflowId(routeHeaderVO.getInitiator().getWorkflowId()); } if (routeHeaderVO.getRoutedByUser() != null) { routeHeader.setRoutedByUserWorkflowId(routeHeaderVO.getRoutedByUser().getWorkflowId()); } routeHeader.setRouteHeaderId(routeHeaderVO.getRouteHeaderId()); routeHeader.setStatusModDate(Utilities.convertCalendar(routeHeaderVO.getDateLastModified())); return routeHeader; } public static ActionItemDTO convertActionItem(ActionItem actionItem) throws KEWUserNotFoundException { ActionItemDTO actionItemVO = new ActionItemDTO(); actionItemVO.setActionItemId(actionItem.getActionItemId()); actionItemVO.setActionItemIndex(actionItem.getActionItemIndex()); actionItemVO.setActionRequestCd(actionItem.getActionRequestCd()); actionItemVO.setActionRequestId(actionItem.getActionRequestId()); actionItemVO.setActionToTake(actionItem.getActionToTake()); actionItemVO.setDateAssigned(actionItem.getDateAssigned()); actionItemVO.setDateAssignedString(actionItem.getDateAssignedString()); actionItemVO.setDelegationType(actionItem.getDelegationType()); actionItemVO.setDelegatorWorkflowId(actionItem.getDelegatorWorkflowId()); if (StringUtils.isNotEmpty(actionItem.getDelegatorWorkflowId())) { actionItemVO.setDelegatorUser(convertUser(actionItem.getDelegatorUser())); } actionItemVO.setDelegatorWorkgroupId(actionItem.getDelegatorWorkgroupId()); if (actionItem.getDelegatorWorkgroupId() != null) { actionItemVO.setDelegatorWorkgroup(convertWorkgroup(actionItem.getDelegatorWorkgroup())); } actionItemVO.setDocHandlerURL(actionItem.getDocHandlerURL()); actionItemVO.setDocLabel(actionItem.getDocLabel()); actionItemVO.setDocName(actionItem.getDocName()); actionItemVO.setDocTitle(actionItem.getDocTitle()); actionItemVO.setResponsibilityId(actionItem.getResponsibilityId()); actionItemVO.setRoleName(actionItem.getRoleName()); actionItemVO.setRouteHeaderId(actionItem.getRouteHeaderId()); actionItemVO.setWorkflowId(actionItem.getWorkflowId()); if (StringUtils.isNotEmpty(actionItem.getWorkflowId())) { actionItemVO.setUser(convertUser(actionItem.getUser())); } actionItemVO.setWorkgroupId(actionItem.getWorkgroupId()); if (actionItem.getWorkgroupId() != null) { actionItemVO.setWorkgroup(convertWorkgroup(actionItem.getWorkgroup())); } return actionItemVO; } /** * Converts the given DocumentContentVO to a document content string. This method considers existing content on the * document and updates approriately. The string returned will be the new document content for the document. If null is * returned, then the document content is unchanged. 
*/ public static String buildUpdatedDocumentContent(DocumentContentDTO documentContentVO) throws WorkflowException { DocumentType documentType = null; String documentContent = KEWConstants.DEFAULT_DOCUMENT_CONTENT; try { // parse the existing content on the document String existingDocContent = KEWConstants.DEFAULT_DOCUMENT_CONTENT; if (documentContentVO.getRouteHeaderId() != null) { DocumentRouteHeaderValue document = KEWServiceLocator.getRouteHeaderService().getRouteHeader(documentContentVO.getRouteHeaderId()); documentType = document.getDocumentType(); existingDocContent = document.getDocContent(); } StandardDocumentContent standardDocContent = new StandardDocumentContent(existingDocContent); DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Document document = builder.newDocument(); Element root = document.createElement(KEWConstants.DOCUMENT_CONTENT_ELEMENT); document.appendChild(root); Element applicationContentElement = standardDocContent.getApplicationContent(); if (documentContentVO.getApplicationContent() != null) { // application content has changed if (!Utilities.isEmpty(documentContentVO.getApplicationContent())) { applicationContentElement = document.createElement(KEWConstants.APPLICATION_CONTENT_ELEMENT); XmlHelper.appendXml(applicationContentElement, documentContentVO.getApplicationContent()); } else { // they've cleared the application content applicationContentElement = null; } } Element attributeContentElement = createDocumentContentSection(document, standardDocContent.getAttributeContent(), documentContentVO.getAttributeDefinitions(), documentContentVO.getAttributeContent(), KEWConstants.ATTRIBUTE_CONTENT_ELEMENT, documentType); Element searchableContentElement = createDocumentContentSection(document, standardDocContent.getSearchableContent(), documentContentVO.getSearchableDefinitions(), documentContentVO.getSearchableContent(), KEWConstants.SEARCHABLE_CONTENT_ELEMENT, documentType); if (applicationContentElement != null) { root.appendChild(applicationContentElement); } if (attributeContentElement != null) { root.appendChild(attributeContentElement); } if (searchableContentElement != null) { root.appendChild(searchableContentElement); } documentContent = XmlHelper.writeNode(document); } catch (Exception e) { handleException("Error parsing document content.", e); } return documentContent; } private static Element createDocumentContentSection(Document document, Element existingAttributeElement, WorkflowAttributeDefinitionDTO[] definitions, String content, String elementName, DocumentType documentType) throws Exception { Element contentSectionElement = existingAttributeElement; // if they've updated the content, we're going to re-build the content section element from scratch if (content != null) { if (!Utilities.isEmpty(content)) { contentSectionElement = document.createElement(elementName); // if they didn't merely clear the content, let's build the content section element by combining the children // of the incoming XML content Element incomingAttributeElement = XmlHelper.readXml(content).getDocumentElement(); NodeList children = incomingAttributeElement.getChildNodes(); for (int index = 0; index < children.getLength(); index++) { contentSectionElement.appendChild(document.importNode(children.item(index), true)); } } else { contentSectionElement = null; } } // if they have new definitions we're going to append those to the existing content section if (!Utilities.isEmpty(definitions)) { String errorMessage = ""; boolean inError = false; 
if (contentSectionElement == null) { contentSectionElement = document.createElement(elementName); } for (int index = 0; index < definitions.length; index++) { WorkflowAttributeDefinitionDTO definitionVO = definitions[index]; AttributeDefinition definition = convertWorkflowAttributeDefinitionVO(definitionVO, documentType); RuleAttribute ruleAttribute = definition.getRuleAttribute(); Object attribute = GlobalResourceLoader.getResourceLoader().getObject(definition.getObjectDefinition()); boolean propertiesAsMap = false; if (KEWConstants.RULE_XML_ATTRIBUTE_TYPE.equals(ruleAttribute.getType())) { ((GenericXMLRuleAttribute) attribute).setRuleAttribute(ruleAttribute); propertiesAsMap = true; } else if (KEWConstants.SEARCHABLE_XML_ATTRIBUTE_TYPE.equals(ruleAttribute.getType())) { ((GenericXMLSearchableAttribute) attribute).setRuleAttribute(ruleAttribute); propertiesAsMap = true; } if (propertiesAsMap) { for (PropertyDefinitionDTO propertyDefinitionVO : definitionVO.getProperties()) { if (attribute instanceof GenericXMLRuleAttribute) { ((GenericXMLRuleAttribute) attribute).getParamMap().put(propertyDefinitionVO.getName(), propertyDefinitionVO.getValue()); } else if (attribute instanceof GenericXMLSearchableAttribute) { ((GenericXMLSearchableAttribute) attribute).getParamMap().put(propertyDefinitionVO.getName(), propertyDefinitionVO.getValue()); } } } // validate inputs from client application if the attribute is capable if (attribute instanceof WorkflowAttributeXmlValidator) { List errors = ((WorkflowAttributeXmlValidator) attribute).validateClientRoutingData(); if (!errors.isEmpty()) { inError = true; errorMessage += "Error validating attribute " + definitions[index].getAttributeName() + " "; for (Iterator iter = errors.iterator(); iter.hasNext();) { WorkflowAttributeValidationError error = (WorkflowAttributeValidationError) iter.next(); errorMessage += error.getMessage() + " "; } } } // dont add to xml if attribute is in error if (!inError) { if (attribute instanceof WorkflowAttribute) { String attributeDocContent = ((WorkflowAttribute) attribute).getDocContent(); if (!StringUtils.isEmpty(attributeDocContent)) { XmlHelper.appendXml(contentSectionElement, attributeDocContent); } } else if (attribute instanceof SearchableAttribute) { String searcheAttributeContent = ((SearchableAttribute) attribute).getSearchContent(DocSearchUtils.getDocumentSearchContext("", documentType.getName(), "")); if (!StringUtils.isEmpty(searcheAttributeContent)) { XmlHelper.appendXml(contentSectionElement, searcheAttributeContent); } } } } if (inError) { throw new WorkflowRuntimeException(errorMessage); } } if (contentSectionElement != null) { // always be sure and import the element into the new document, if it originated from the existing doc content // and // appended to it, it will need to be imported contentSectionElement = (Element) document.importNode(contentSectionElement, true); } return contentSectionElement; } public static DocumentContentDTO convertDocumentContent(String documentContentValue, Long documentId) throws WorkflowException { if (documentContentValue == null) { return null; } DocumentContentDTO documentContentVO = new DocumentContentDTO(); // initialize the content fields documentContentVO.setApplicationContent(""); documentContentVO.setAttributeContent(""); documentContentVO.setSearchableContent(""); documentContentVO.setRouteHeaderId(documentId); try { DocumentContent documentContent = new StandardDocumentContent(documentContentValue); if (documentContent.getApplicationContent() != null) { 
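// Write each populated section of the parsed document content back out as an XML string on the DTO.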
documentContentVO.setApplicationContent(XmlHelper.writeNode(documentContent.getApplicationContent())); } if (documentContent.getAttributeContent() != null) { documentContentVO.setAttributeContent(XmlHelper.writeNode(documentContent.getAttributeContent())); } if (documentContent.getSearchableContent() != null) { documentContentVO.setSearchableContent(XmlHelper.writeNode(documentContent.getSearchableContent())); } } catch (Exception e) { handleException("Error parsing document content.", e); } return documentContentVO; } public static WorkgroupDTO convertWorkgroup(Workgroup workgroup) { if (workgroup == null) { return null; } WorkgroupDTO workgroupVO = new WorkgroupDTO(); workgroupVO.setActiveInd(workgroup.getActiveInd().booleanValue()); workgroupVO.setDescription(workgroup.getDescription()); workgroupVO.setWorkgroupId(workgroup.getWorkflowGroupId().getGroupId()); workgroupVO.setWorkgroupName(workgroup.getGroupNameId().getNameId()); workgroupVO.setWorkgroupType(workgroup.getWorkgroupType()); if (workgroup.getUsers() != null) { workgroupVO.setMembers(new UserDTO[workgroup.getUsers().size()]); int index = 0; for (Iterator iterator = workgroup.getUsers().iterator(); iterator.hasNext(); index++) { WorkflowUser user = (WorkflowUser) iterator.next(); workgroupVO.getMembers()[index] = convertUser(user); } } return workgroupVO; } public static UserDTO convertUser(WorkflowUser user) { if (user == null) { return null; } UserDTO userVO = new UserDTO(); userVO.setNetworkId(user.getAuthenticationUserId() == null ? null : user.getAuthenticationUserId().getAuthenticationId()); userVO.setUuId(user.getUuId() == null ? null : user.getUuId().getUuId()); userVO.setEmplId(user.getEmplId() == null ? null : user.getEmplId().getEmplId()); userVO.setWorkflowId(user.getWorkflowUserId() == null ? 
null : user.getWorkflowUserId().getWorkflowId()); userVO.setDisplayName(user.getDisplayName()); userVO.setLastName(user.getLastName()); userVO.setFirstName(user.getGivenName()); userVO.setEmailAddress(user.getEmailAddress()); // Preferences preferences = SpringServiceLocator.getPreferencesService().getPreferences(user); // userVO.setUserPreferencePopDocHandler(KEWConstants.PREFERENCES_YES_VAL.equals(preferences.getOpenNewWindow())); userVO.setUserPreferencePopDocHandler(true); return userVO; } public static WorkflowUser convertUserVO(UserDTO userVO) throws KEWUserNotFoundException { if (userVO == null) { return null; } UserId userId = null; if (userVO.getWorkflowId() != null) { userId = new WorkflowUserId(userVO.getWorkflowId()); } else if (userVO.getNetworkId() != null) { userId = new AuthenticationUserId(userVO.getNetworkId()); } else if (userVO.getEmplId() != null) { userId = new EmplId(userVO.getEmplId()); } else if (userVO.getUuId() != null) { userId = new UuId(userVO.getUuId()); } else { throw new KEWUserNotFoundException("Cannot convert the given UserVO, it does not contain any valid user ids."); } return KEWServiceLocator.getUserService().getWorkflowUser(userId); } public static DocumentTypeDTO convertDocumentType(DocumentType docType) { DocumentTypeDTO docTypeVO = new DocumentTypeDTO(); docTypeVO.setDocTypeParentId(docType.getDocTypeParentId()); if (docType.getParentDocType() != null) { docTypeVO.setDocTypeParentName(docType.getParentDocType().getName()); } docTypeVO.setDocTypeDescription(docType.getDescription()); docTypeVO.setDocTypeHandlerUrl(docType.getDocHandlerUrl()); docTypeVO.setDocTypeId(docType.getDocumentTypeId()); docTypeVO.setDocTypeLabel(docType.getLabel()); docTypeVO.setName(docType.getName()); docTypeVO.setDocTypeVersion(docType.getVersion()); Boolean currentInd = docType.getCurrentInd(); if (currentInd == null) { docTypeVO.setDocTypeCurrentInd(null); } else if (currentInd.booleanValue()) { docTypeVO.setDocTypeCurrentInd(KEWConstants.ACTIVE_CD); } else { docTypeVO.setDocTypeCurrentInd(KEWConstants.INACTIVE_CD); } docTypeVO.setPostProcessorName(docType.getPostProcessorName()); docTypeVO.setDocTypeJndiFactoryClass(null); docTypeVO.setDocTypeActiveInd(docType.getActiveInd().booleanValue()); if (docType.getParentDocType() != null) { docTypeVO.setDocTypeActiveInherited(true); } else { docTypeVO.setDocTypeActiveInherited(false); } docTypeVO.setDocTypePreApprovalPolicy(docType.getPreApprovePolicy().getPolicyValue().booleanValue()); Workgroup blanketWorkgroup = docType.getBlanketApproveWorkgroup(); if (blanketWorkgroup != null) { docTypeVO.setBlanketApproveWorkgroupId(blanketWorkgroup.getWorkflowGroupId().getGroupId()); } docTypeVO.setBlanketApprovePolicy(docType.getBlanketApprovePolicy()); if (CompatUtils.isRouteLevelCompatible(docType)) { List nodes = CompatUtils.getRouteLevelCompatibleNodeList(docType); RouteTemplateEntryDTO[] templates = new RouteTemplateEntryDTO[nodes.size()]; int index = 0; for (Iterator iterator = nodes.iterator(); iterator.hasNext();) { RouteNode node = (RouteNode) iterator.next(); templates[index++] = convertRouteTemplateEntry(node); } docTypeVO.setRouteTemplates(templates); } docTypeVO.setRoutePath(convertRoutePath(docType)); return docTypeVO; } public static RouteTemplateEntryDTO convertRouteTemplateEntry(RouteNode node) { RouteTemplateEntryDTO entryVO = new RouteTemplateEntryDTO(); entryVO.setFinalApprover(node.getFinalApprovalInd().booleanValue()); entryVO.setMandatoryRoute(node.getMandatoryRouteInd().booleanValue()); 
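// Derive the legacy numeric route level for this node via CompatUtils so route-level based clients still work.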
entryVO.setRouteLevel(CompatUtils.getLevelForNode(node.getDocumentType(), node.getRouteNodeName())); entryVO.setRouteLevelName(node.getRouteNodeName()); entryVO.setRouteMethodName(node.getRouteMethodName()); entryVO.setDocTypeId(node.getDocumentTypeId()); entryVO.setExceptionWorkgroupId(node.getExceptionWorkgroupId()); entryVO.setJrf_ver_nbr(node.getLockVerNbr()); entryVO.setMandatoryRoute(node.getMandatoryRouteInd().toString()); return entryVO; } public static RoutePathDTO convertRoutePath(DocumentType documentType) { RoutePathDTO routePath = new RoutePathDTO(); ProcessDTO[] processes = new ProcessDTO[documentType.getProcesses().size()]; int index = 0; for (Iterator iterator = documentType.getProcesses().iterator(); iterator.hasNext();) { Process process = (Process) iterator.next(); processes[index++] = convertProcess(process); } routePath.setProcesses(processes); return routePath; } public static ActionRequestDTO convertActionRequest(ActionRequestValue actionRequest) throws KEWUserNotFoundException { // TODO some newly added actionrequest properties are not here (delegation stuff) ActionRequestDTO actionRequestVO = new ActionRequestDTO(); actionRequestVO.setActionRequested(actionRequest.getActionRequested()); actionRequestVO.setActionRequestId(actionRequest.getActionRequestId()); if (actionRequest.getActionTaken() != null) { actionRequestVO.setActionTakenId(actionRequest.getActionTakenId()); actionRequestVO.setActionTaken(convertActionTaken(actionRequest.getActionTaken())); } actionRequestVO.setAnnotation(actionRequest.getAnnotation()); actionRequestVO.setDateCreated(Utilities.convertTimestamp(actionRequest.getCreateDate())); actionRequestVO.setDocVersion(actionRequest.getDocVersion()); actionRequestVO.setUserDTO(convertUser(actionRequest.getWorkflowUser())); if (actionRequest.getWorkflowId() != null) { // TODO switch this to a user vo actionRequestVO.setEmplyId(actionRequest.getWorkflowUser().getEmplId().getEmplId()); } actionRequestVO.setIgnorePrevAction(actionRequest.getIgnorePrevAction()); actionRequestVO.setPriority(actionRequest.getPriority()); actionRequestVO.setRecipientTypeCd(actionRequest.getRecipientTypeCd()); actionRequestVO.setResponsibilityDesc(actionRequest.getResponsibilityDesc()); actionRequestVO.setResponsibilityId(actionRequest.getResponsibilityId()); actionRequestVO.setRouteHeaderId(actionRequest.getRouteHeaderId()); actionRequestVO.setRouteLevel(actionRequest.getRouteLevel()); actionRequestVO.setNodeName(actionRequest.getPotentialNodeName()); actionRequestVO.setNodeInstanceId((actionRequest.getNodeInstance() == null ? null : actionRequest.getNodeInstance().getRouteNodeInstanceId())); // actionRequestVO.setRouteMethodName(actionRequest.getRouteMethodName()); // TODO delyea - should below be using actionRequest.getRoleName()? 
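// Note: both roleName and qualifiedRoleName on the DTO are populated from the request's qualified role name.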
actionRequestVO.setRoleName(actionRequest.getQualifiedRoleName()); actionRequestVO.setQualifiedRoleName(actionRequest.getQualifiedRoleName()); actionRequestVO.setQualifiedRoleNameLabel(actionRequest.getQualifiedRoleNameLabel()); actionRequestVO.setStatus(actionRequest.getStatus()); if (actionRequest.isWorkgroupRequest()) { actionRequestVO.setWorkgroupId(actionRequest.getWorkgroupId()); actionRequestVO.setWorkgroupDTO(convertWorkgroup(actionRequest.getWorkgroup())); } actionRequestVO.setParentActionRequestId(actionRequest.getParentActionRequestId()); ActionRequestDTO[] childRequestVOs = new ActionRequestDTO[actionRequest.getChildrenRequests().size()]; int index = 0; for (Iterator iterator = actionRequest.getChildrenRequests().iterator(); iterator.hasNext();) { ActionRequestValue childRequest = (ActionRequestValue) iterator.next(); ActionRequestDTO childRequestVO = convertActionRequest(childRequest); childRequestVO.setParentActionRequest(actionRequestVO); childRequestVOs[index++] = childRequestVO; } actionRequestVO.setChildrenRequests(childRequestVOs); return actionRequestVO; } public static ActionTakenDTO convertActionTaken(ActionTakenValue actionTaken) throws KEWUserNotFoundException { if (actionTaken == null) { return null; } ActionTakenDTO actionTakenVO = new ActionTakenDTO(); actionTakenVO.setActionDate(Utilities.convertTimestamp(actionTaken.getActionDate())); actionTakenVO.setActionTaken(actionTaken.getActionTaken()); actionTakenVO.setActionTakenId(actionTaken.getActionTakenId()); actionTakenVO.setAnnotation(actionTaken.getAnnotation()); actionTakenVO.setDocVersion(actionTaken.getDocVersion()); actionTakenVO.setRouteHeaderId(actionTaken.getRouteHeaderId()); WorkflowUser user = actionTaken.getWorkflowUser(); if (user != null) { actionTakenVO.setUserDTO(convertUser(user)); } WorkflowUser delegator = actionTaken.getDelegatorUser(); if (delegator != null) { actionTakenVO.setDelegatorDTO(convertUser(delegator)); } return actionTakenVO; } public static WorkgroupIdDTO convertGroupId(GroupId groupId) { WorkgroupIdDTO workgroupId = null; if (groupId instanceof GroupNameId) { GroupNameId groupName = (GroupNameId) groupId; workgroupId = new WorkgroupNameIdDTO(groupName.getNameId()); } else if (groupId instanceof WorkflowGroupId) { WorkflowGroupId workflowGroupId = (WorkflowGroupId) groupId; workgroupId = new WorkflowGroupIdDTO(workflowGroupId.getGroupId()); } return workgroupId; } public static GroupId convertWorkgroupIdVO(WorkgroupIdDTO workgroupId) { GroupId groupId = null; if (workgroupId instanceof WorkgroupNameIdDTO) { WorkgroupNameIdDTO workgroupName = (WorkgroupNameIdDTO) workgroupId; groupId = new GroupNameId(workgroupName.getWorkgroupName()); } else if (workgroupId instanceof WorkflowGroupIdDTO) { WorkflowGroupIdDTO workflowGroupId = (WorkflowGroupIdDTO) workgroupId; groupId = new WorkflowGroupId(workflowGroupId.getWorkgroupId()); } return groupId; } public static UserIdDTO convertUserId(UserId userId) { UserIdDTO userIdVO = null; if (userId instanceof AuthenticationUserId) { AuthenticationUserId id = (AuthenticationUserId) userId; userIdVO = new NetworkIdDTO(id.getAuthenticationId()); } else if (userId instanceof EmplId) { EmplId id = (EmplId) userId; userIdVO = new EmplIdDTO(id.getEmplId()); } else if (userId instanceof UuId) { UuId id = (UuId) userId; userIdVO = new UuIdDTO(id.getUuId()); } else if (userId instanceof WorkflowUserId) { WorkflowUserId id = (WorkflowUserId) userId; userIdVO = new WorkflowIdDTO(id.getWorkflowId()); } return userIdVO; } public static UserId 
convertUserIdVO(UserIdDTO userIdVO) { UserId userId = null; if (userIdVO instanceof NetworkIdDTO) { NetworkIdDTO id = (NetworkIdDTO) userIdVO; userId = new AuthenticationUserId(id.getNetworkId()); if (userId.isEmpty()) { throw new RuntimeException("Attempting to use empty NetworkId"); } } else if (userIdVO instanceof EmplIdDTO) { EmplIdDTO id = (EmplIdDTO) userIdVO; userId = new EmplId(id.getEmplId()); if (userId.isEmpty()) { throw new RuntimeException("Attempting to use empty EmplId"); } } else if (userIdVO instanceof UuIdDTO) { UuIdDTO id = (UuIdDTO) userIdVO; userId = new UuId(id.getUuId()); if (userId.isEmpty()) { throw new RuntimeException("Attempting to use empty UuId"); } } else if (userIdVO instanceof WorkflowIdDTO) { WorkflowIdDTO id = (WorkflowIdDTO) userIdVO; userId = new WorkflowUserId(id.getWorkflowId()); if (userId.isEmpty()) { throw new RuntimeException("Attempting to use empty WorkflowId"); } } return userId; } public static ResponsiblePartyDTO convertResponsibleParty(ResponsibleParty responsibleParty) { if (responsibleParty == null) { return null; } ResponsiblePartyDTO responsiblePartyVO = new ResponsiblePartyDTO(); responsiblePartyVO.setWorkgroupId(DTOConverter.convertGroupId(responsibleParty.getGroupId())); responsiblePartyVO.setUserId(DTOConverter.convertUserId(responsibleParty.getUserId())); responsiblePartyVO.setRoleName(responsibleParty.getRoleName()); return responsiblePartyVO; } public static ResponsibleParty convertResponsiblePartyVO(ResponsiblePartyDTO responsiblePartyVO) { if (responsiblePartyVO == null) { return null; } ResponsibleParty responsibleParty = new ResponsibleParty(); responsibleParty.setGroupId(DTOConverter.convertWorkgroupIdVO(responsiblePartyVO.getWorkgroupId())); responsibleParty.setUserId(DTOConverter.convertUserIdVO(responsiblePartyVO.getUserId())); responsibleParty.setRoleName(responsiblePartyVO.getRoleName()); return responsibleParty; } /** * refactor name to convertResponsiblePartyVO when ResponsibleParty object is gone * * @param responsiblePartyVO * @return * @throws KEWUserNotFoundException */ public static Recipient convertResponsiblePartyVOtoRecipient(ResponsiblePartyDTO responsiblePartyVO) throws KEWUserNotFoundException { if (responsiblePartyVO == null) { return null; } if (responsiblePartyVO.getRoleName() != null) { return new RoleRecipient(responsiblePartyVO.getRoleName()); } GroupId groupId = convertWorkgroupIdVO(responsiblePartyVO.getWorkgroupId()); if (groupId != null) { return KEWServiceLocator.getWorkgroupService().getWorkgroup(groupId); } UserId userId = convertUserIdVO(responsiblePartyVO.getUserId()); if (userId != null) { return KEWServiceLocator.getUserService().getWorkflowUser(userId); } throw new WorkflowRuntimeException("ResponsibleParty of unknown type"); } /** * Converts an ActionRequestVO to an ActionRequest. The ActionRequestVO passed in must be the root action request in the * graph, otherwise an IllegalArgumentException is thrown. This is to avoid potentially sticky issues with circular * references in the conversion. NOTE: This method's primary purpose is to convert ActionRequestVOs returned from a * RouteModule. Incidentally, the VO's returned from the route module will be lacking some information (like the node * instance) so no attempts are made to convert this data since further initialization is handled by a higher level * component (namely ActionRequestService.initializeActionRequestGraph). 
*/ public static ActionRequestValue convertActionRequestVO(ActionRequestDTO actionRequestVO) throws KEWUserNotFoundException { if (actionRequestVO == null) { return null; } if (actionRequestVO.getParentActionRequest() != null || actionRequestVO.getParentActionRequestId() != null) { throw new IllegalArgumentException("Cannot convert a non-root ActionRequestVO"); } ActionRequestValue actionRequest = new ActionRequestFactory().createBlankActionRequest(); populateActionRequest(actionRequest, actionRequestVO); if (actionRequestVO.getChildrenRequests() != null) { for (int i = 0; i < actionRequestVO.getChildrenRequests().length; i++) { ActionRequestDTO childVO = actionRequestVO.getChildrenRequests()[i]; actionRequest.getChildrenRequests().add(convertActionRequestVO(childVO, actionRequest)); } } return actionRequest; } public static ActionRequestValue convertActionRequestVO(ActionRequestDTO actionRequestVO, ActionRequestValue parentActionRequest) throws KEWUserNotFoundException { if (actionRequestVO == null) { return null; } ActionRequestValue actionRequest = new ActionRequestFactory().createBlankActionRequest(); populateActionRequest(actionRequest, actionRequestVO); actionRequest.setParentActionRequest(parentActionRequest); actionRequest.setParentActionRequestId(parentActionRequest.getActionRequestId()); if (actionRequestVO.getChildrenRequests() != null) { for (int i = 0; i < actionRequestVO.getChildrenRequests().length; i++) { ActionRequestDTO childVO = actionRequestVO.getChildrenRequests()[i]; actionRequest.getChildrenRequests().add(convertActionRequestVO(childVO, actionRequest)); } } return actionRequest; } /** * This method converts everything except for the parent and child requests */ private static void populateActionRequest(ActionRequestValue actionRequest, ActionRequestDTO actionRequestVO) throws KEWUserNotFoundException { actionRequest.setActionRequested(actionRequestVO.getActionRequested()); actionRequest.setActionRequestId(actionRequestVO.getActionRequestId()); actionRequest.setActionTakenId(actionRequestVO.getActionTakenId()); actionRequest.setAnnotation(actionRequestVO.getAnnotation()); actionRequest.setApprovePolicy(actionRequestVO.getApprovePolicy()); actionRequest.setCreateDate(new Timestamp(new Date().getTime())); actionRequest.setCurrentIndicator(actionRequestVO.getCurrentIndicator()); actionRequest.setDelegationType(actionRequestVO.getDelegationType()); actionRequest.setDocVersion(actionRequestVO.getDocVersion()); actionRequest.setIgnorePrevAction(actionRequestVO.getIgnorePrevAction()); actionRequest.setPriority(actionRequestVO.getPriority()); actionRequest.setQualifiedRoleName(actionRequestVO.getQualifiedRoleName()); actionRequest.setQualifiedRoleNameLabel(actionRequestVO.getQualifiedRoleNameLabel()); actionRequest.setRecipientTypeCd(actionRequestVO.getRecipientTypeCd()); actionRequest.setResponsibilityDesc(actionRequestVO.getResponsibilityDesc()); actionRequest.setResponsibilityId(actionRequestVO.getResponsibilityId()); actionRequest.setRoleName(actionRequestVO.getRoleName()); Long routeHeaderId = actionRequestVO.getRouteHeaderId(); if (routeHeaderId != null) { actionRequest.setRouteHeaderId(routeHeaderId); actionRequest.setRouteHeader(KEWServiceLocator.getRouteHeaderService().getRouteHeader(routeHeaderId)); } // properties set in routemanagerservice actionRequest.setRouteLevel(actionRequestVO.getRouteLevel()); // TODO add the node instance to the VO // actionRequest.setRouteMethodName(actionRequestVO.getRouteMethodName()); 
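// Resolve the request recipient below: a user (by user id, empl id, or user DTO) and/or a workgroup must be supplied.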
        actionRequest.setStatus(actionRequestVO.getStatus());
        // TODO this should be moved to a validate somewhere...
        boolean userSet = false;
        if (actionRequestVO.getUserIdVO() != null) {
            UserId userId = convertUserIdVO(actionRequestVO.getUserIdVO());
            WorkflowUser user = KEWServiceLocator.getUserService().getWorkflowUser(userId);
            actionRequest.setWorkflowId(user.getWorkflowId());
            userSet = true;
        } else if (actionRequestVO.getEmplyId() != null) {
            WorkflowUser user = KEWServiceLocator.getUserService().getWorkflowUser(new EmplId(actionRequestVO.getEmplyId()));
            actionRequest.setWorkflowId(user.getWorkflowId());
            userSet = true;
        } else if (actionRequestVO.getUserDTO() != null) {
            WorkflowUser user = convertUserVO(actionRequestVO.getUserDTO());
            actionRequest.setWorkflowId(user.getWorkflowId());
            userSet = true;
        }
        if (actionRequestVO.getWorkgroupId() != null) {
            Long workgroupId = actionRequestVO.getWorkgroupId();
            // validate that the workgroup is good.
            Workgroup workgroup = KEWServiceLocator.getWorkgroupService().getWorkgroup(new WorkflowGroupId(workgroupId));
            if (workgroup == null) {
                throw new RuntimeException("Workgroup Id " + workgroupId + " is invalid. Action Request cannot be activated.");
            }
            actionRequest.setWorkgroupId(workgroupId);
            userSet = true;
        } else if (actionRequestVO.getWorkgroupDTO() != null) {
            Long workgroupId = actionRequestVO.getWorkgroupDTO().getWorkgroupId();
            // validate that the workgroup is good.
            Workgroup workgroup = KEWServiceLocator.getWorkgroupService().getWorkgroup(new WorkflowGroupId(workgroupId));
            if (workgroup == null) {
                throw new RuntimeException("Workgroup Id " + workgroupId + " is invalid. Action Request cannot be activated.");
            }
            actionRequest.setWorkgroupId(workgroupId);
            userSet = true;
        }
        // TODO role requests will not have a user or workgroup, so this code needs to handle that case
        if (!userSet) {
            throw new RuntimeException("Post processor didn't set a user or workgroup on the request");
        }
    }

    public static ActionTakenValue convertActionTakenVO(ActionTakenDTO actionTakenVO) throws KEWUserNotFoundException {
        if (actionTakenVO == null) {
            return null;
        }
        ActionTakenValue actionTaken = new ActionTakenValue();
        actionTaken.setActionDate(new Timestamp(actionTakenVO.getActionDate().getTimeInMillis()));
        actionTaken.setActionTaken(actionTakenVO.getActionTaken());
        actionTaken.setActionTakenId(actionTakenVO.getActionTakenId());
        actionTaken.setAnnotation(actionTakenVO.getAnnotation());
        actionTaken.setCurrentIndicator(Boolean.TRUE);
        WorkflowUser delegator = convertUserVO(actionTakenVO.getDelegatorDTO());
        actionTaken.setDelegator(delegator);
        if (delegator != null) {
            actionTaken.setDelegatorWorkflowId(delegator.getWorkflowUserId().getWorkflowId());
        }
        actionTaken.setDocVersion(actionTakenVO.getDocVersion());
        DocumentRouteHeaderValue routeHeader = KEWServiceLocator.getRouteHeaderService().getRouteHeader(actionTakenVO.getRouteHeaderId());
        actionTaken.setRouteHeader(routeHeader);
        // populate the route header id from the incoming DTO
        actionTaken.setRouteHeaderId(actionTakenVO.getRouteHeaderId());
        WorkflowUser user = convertUserVO(actionTakenVO.getUserDTO());
        actionTaken.setWorkflowId(user.getWorkflowUserId().getWorkflowId());
        return actionTaken;
    }

    public static DocumentRouteStatusChangeDTO convertDocumentRouteStatusChange(DocumentRouteStatusChange statusChange) {
        if (statusChange == null) {
            return null;
        }
        DocumentRouteStatusChangeDTO statusChangeVO = new DocumentRouteStatusChangeDTO();
        statusChangeVO.setRouteHeaderId(statusChange.getRouteHeaderId());
        statusChangeVO.setAppDocId(statusChange.getAppDocId());
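// Copy both the old and new route status codes so the receiver can see the full transition.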
statusChangeVO.setOldRouteStatus(statusChange.getOldRouteStatus()); statusChangeVO.setNewRouteStatus(statusChange.getNewRouteStatus()); return statusChangeVO; } public static DocumentRouteLevelChangeDTO convertDocumentRouteLevelChange(DocumentRouteLevelChange routeLevelChange) { if (routeLevelChange == null) { return null; } DocumentRouteLevelChangeDTO routeLevelChangeVO = new DocumentRouteLevelChangeDTO(); routeLevelChangeVO.setRouteHeaderId(routeLevelChange.getRouteHeaderId()); routeLevelChangeVO.setAppDocId(routeLevelChange.getAppDocId()); routeLevelChangeVO.setOldRouteLevel(routeLevelChange.getOldRouteLevel()); routeLevelChangeVO.setNewRouteLevel(routeLevelChange.getNewRouteLevel()); routeLevelChangeVO.setOldNodeName(routeLevelChange.getOldNodeName()); routeLevelChangeVO.setNewNodeName(routeLevelChange.getNewNodeName()); routeLevelChangeVO.setOldNodeInstanceId(routeLevelChange.getOldNodeInstanceId()); routeLevelChangeVO.setNewNodeInstanceId(routeLevelChange.getNewNodeInstanceId()); return routeLevelChangeVO; } public static DeleteEventDTO convertDeleteEvent(DeleteEvent deleteEvent) { if (deleteEvent == null) { return null; } DeleteEventDTO deleteEventVO = new DeleteEventDTO(); deleteEventVO.setRouteHeaderId(deleteEvent.getRouteHeaderId()); deleteEventVO.setAppDocId(deleteEvent.getAppDocId()); return deleteEventVO; } public static ActionTakenEventDTO convertActionTakenEvent(ActionTakenEvent actionTakenEvent) throws KEWUserNotFoundException { if (actionTakenEvent == null) { return null; } ActionTakenEventDTO actionTakenEventVO = new ActionTakenEventDTO(); actionTakenEventVO.setRouteHeaderId(actionTakenEvent.getRouteHeaderId()); actionTakenEventVO.setAppDocId(actionTakenEvent.getAppDocId()); actionTakenEventVO.setActionTaken(convertActionTaken(actionTakenEvent.getActionTaken())); return actionTakenEventVO; } public static BeforeProcessEventDTO convertBeforeProcessEvent(BeforeProcessEvent event) throws KEWUserNotFoundException { if (event == null) { return null; } BeforeProcessEventDTO beforeProcessEvent = new BeforeProcessEventDTO(); beforeProcessEvent.setRouteHeaderId(event.getRouteHeaderId()); beforeProcessEvent.setAppDocId(event.getAppDocId()); beforeProcessEvent.setNodeInstanceId(event.getNodeInstanceId()); return beforeProcessEvent; } public static AfterProcessEventDTO convertAfterProcessEvent(AfterProcessEvent event) throws KEWUserNotFoundException { if (event == null) { return null; } AfterProcessEventDTO afterProcessEvent = new AfterProcessEventDTO(); afterProcessEvent.setRouteHeaderId(event.getRouteHeaderId()); afterProcessEvent.setAppDocId(event.getAppDocId()); afterProcessEvent.setNodeInstanceId(event.getNodeInstanceId()); afterProcessEvent.setSuccessfullyProcessed(event.isSuccessfullyProcessed()); return afterProcessEvent; } public static AttributeDefinition convertWorkflowAttributeDefinitionVO(WorkflowAttributeDefinitionDTO definitionVO, org.kuali.rice.kew.doctype.bo.DocumentType documentType) { if (definitionVO == null) { return null; } // get the rule attribute so we can get's it's message antity and not blow up if it's remote RuleAttribute ruleAttribute = KEWServiceLocator.getRuleAttributeService().findByClassName(definitionVO.getAttributeName()); if (ruleAttribute == null) { ruleAttribute = KEWServiceLocator.getRuleAttributeService().findByName(definitionVO.getAttributeName()); } if (ruleAttribute == null) { throw new WorkflowRuntimeException("Attribute " + definitionVO.getAttributeName() + " not found"); } ObjectDefinition definition = new 
ObjectDefinition(ruleAttribute.getClassName()); for (int index = 0; index < definitionVO.getConstructorParameters().length; index++) { String parameter = definitionVO.getConstructorParameters()[index]; definition.addConstructorParameter(new DataDefinition(parameter, String.class)); } boolean propertiesAsMap = KEWConstants.RULE_XML_ATTRIBUTE_TYPE.equals(ruleAttribute.getType()) || KEWConstants.SEARCHABLE_XML_ATTRIBUTE_TYPE.equals(ruleAttribute.getType()); if (!propertiesAsMap) { for (int index = 0; index < definitionVO.getProperties().length; index++) { PropertyDefinitionDTO propertyDefVO = definitionVO.getProperties()[index]; definition.addProperty(new PropertyDefinition(propertyDefVO.getName(), new DataDefinition(propertyDefVO.getValue(), String.class))); } } // this is likely from an EDL validate call and ME may needed to be added to the AttDefinitionVO. if (ruleAttribute.getServiceNamespace() != null) { definition.setServiceNamespace(ruleAttribute.getServiceNamespace()); } else { // get the me from the document type if it's been passed in - the document is having action taken on it. if (documentType != null) { definition.setServiceNamespace(documentType.getServiceNamespace()); } } return new AttributeDefinition(ruleAttribute, definition); } public static DocumentDetailDTO convertDocumentDetail(DocumentRouteHeaderValue routeHeader) throws WorkflowException { if (routeHeader == null) { return null; } DocumentDetailDTO detail = new DocumentDetailDTO(); populateRouteHeaderVO(detail, routeHeader); Map nodeInstances = new HashMap(); List actionRequestVOs = new ArrayList(); List rootActionRequests = KEWServiceLocator.getActionRequestService().getRootRequests(routeHeader.getActionRequests()); for (Iterator iterator = rootActionRequests.iterator(); iterator.hasNext();) { ActionRequestValue actionRequest = (ActionRequestValue) iterator.next(); actionRequestVOs.add(convertActionRequest(actionRequest)); RouteNodeInstance nodeInstance = actionRequest.getNodeInstance(); if (nodeInstance == null) { continue; } if (nodeInstance.getRouteNodeInstanceId() == null) { throw new WorkflowException("Error creating document detail structure because of NULL node instance id."); } nodeInstances.put(nodeInstance.getRouteNodeInstanceId(), nodeInstance); } detail.setActionRequests((ActionRequestDTO[]) actionRequestVOs.toArray(new ActionRequestDTO[0])); List nodeInstanceVOs = new ArrayList(); for (Iterator iterator = nodeInstances.values().iterator(); iterator.hasNext();) { RouteNodeInstance nodeInstance = (RouteNodeInstance) iterator.next(); nodeInstanceVOs.add(convertRouteNodeInstance(nodeInstance)); } detail.setNodeInstances((RouteNodeInstanceDTO[]) nodeInstanceVOs.toArray(new RouteNodeInstanceDTO[0])); List actionTakenVOs = new ArrayList(); for (Iterator iterator = routeHeader.getActionsTaken().iterator(); iterator.hasNext();) { ActionTakenValue actionTaken = (ActionTakenValue) iterator.next(); actionTakenVOs.add(convertActionTaken(actionTaken)); } detail.setActionsTaken((ActionTakenDTO[]) actionTakenVOs.toArray(new ActionTakenDTO[0])); return detail; } public static RouteNodeInstanceDTO convertRouteNodeInstance(RouteNodeInstance nodeInstance) throws WorkflowException { if (nodeInstance == null) { return null; } RouteNodeInstanceDTO nodeInstanceVO = new RouteNodeInstanceDTO(); nodeInstanceVO.setActive(nodeInstance.isActive()); nodeInstanceVO.setBranchId(nodeInstance.getBranch().getBranchId()); nodeInstanceVO.setComplete(nodeInstance.isComplete()); nodeInstanceVO.setDocumentId(nodeInstance.getDocumentId()); 
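// Copy the remaining node-instance flags, convert its state key/value pairs, then recurse into the next node instances.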
nodeInstanceVO.setInitial(nodeInstance.isInitial()); nodeInstanceVO.setName(nodeInstance.getName()); nodeInstanceVO.setProcessId(nodeInstance.getProcess() != null ? nodeInstance.getProcess().getRouteNodeInstanceId() : null); nodeInstanceVO.setRouteNodeId(nodeInstance.getRouteNode().getRouteNodeId()); nodeInstanceVO.setRouteNodeInstanceId(nodeInstance.getRouteNodeInstanceId()); nodeInstanceVO.setState(convertStates(nodeInstance.getState())); nodeInstanceVO.setNextNodes(new RouteNodeInstanceDTO[nodeInstance.getNextNodeInstances().size()]); int i = 0; for (Iterator iter = nodeInstance.getNextNodeInstances().iterator(); iter.hasNext(); i++) { RouteNodeInstance nextNodeInstance = (RouteNodeInstance) iter.next(); nodeInstanceVO.getNextNodes()[i] = convertRouteNodeInstance(nextNodeInstance); } return nodeInstanceVO; } public static StateDTO[] convertStates(Collection states) { if (states == null) { return null; } StateDTO[] stateVOs = new StateDTO[states.size()]; int index = 0; for (Iterator iterator = states.iterator(); iterator.hasNext();) { State state = (State) iterator.next(); stateVOs[index++] = convertState(state); } return stateVOs; } public static StateDTO convertState(State nodeState) { if (nodeState == null) { return null; } StateDTO stateVO = new StateDTO(); stateVO.setStateId(nodeState.getStateId()); stateVO.setKey(nodeState.getKey()); stateVO.setValue(nodeState.getValue()); return stateVO; } public static RouteNodeDTO convertRouteNode(RouteNode node) { if (node == null) { return null; } RouteNodeDTO nodeVO = new RouteNodeDTO(); nodeVO.setActivationType(node.getActivationType()); nodeVO.setBranchName(node.getBranch() != null ? node.getBranch().getName() : null); nodeVO.setDocumentTypeId(node.getDocumentTypeId()); try { nodeVO.setExceptionWorkgroup(convertWorkgroup(node.getExceptionWorkgroup())); } catch (KEWUserNotFoundException e) { throw new WorkflowRuntimeException("Could not locate users in exception workgroup for node " + node.getRouteNodeId() + ".", e); } nodeVO.setFinalApprovalInd(node.getFinalApprovalInd().booleanValue()); nodeVO.setMandatoryRouteInd(node.getMandatoryRouteInd().booleanValue()); nodeVO.setNodeType(node.getNodeType()); nodeVO.setRouteMethodCode(node.getRouteMethodCode()); nodeVO.setRouteMethodName(node.getRouteMethodName()); nodeVO.setRouteNodeId(node.getRouteNodeId()); nodeVO.setRouteNodeName(node.getRouteNodeName()); int index = 0; Long[] previousNodeIds = new Long[node.getPreviousNodes().size()]; for (Iterator iterator = node.getPreviousNodes().iterator(); iterator.hasNext();) { RouteNode prevNode = (RouteNode) iterator.next(); previousNodeIds[index++] = prevNode.getRouteNodeId(); } nodeVO.setPreviousNodeIds(previousNodeIds); index = 0; Long[] nextNodeIds = new Long[node.getNextNodes().size()]; for (Iterator iterator = node.getNextNodes().iterator(); iterator.hasNext();) { RouteNode nextNode = (RouteNode) iterator.next(); nextNodeIds[index++] = nextNode.getRouteNodeId(); } nodeVO.setNextNodeIds(nextNodeIds); return nodeVO; } public static ProcessDTO convertProcess(Process process) { ProcessDTO processVO = new ProcessDTO(); processVO.setInitial(process.isInitial()); processVO.setInitialRouteNode(convertRouteNode(process.getInitialRouteNode())); processVO.setName(process.getName()); processVO.setProcessId(process.getProcessId()); return processVO; } public static MovePoint convertMovePointVO(MovePointDTO movePointVO) { MovePoint movePoint = new MovePoint(); movePoint.setStartNodeName(movePointVO.getStartNodeName()); 
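// A move point is simply the starting node name plus the number of steps to move from it.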
movePoint.setStepsToMove(movePointVO.getStepsToMove()); return movePoint; } public static AdHocRevoke convertAdHocRevokeVO(AdHocRevokeDTO revokeVO) throws WorkflowException { AdHocRevoke revoke = new AdHocRevoke(); revoke.setActionRequestId(revokeVO.getActionRequestId()); revoke.setNodeName(revokeVO.getNodeName()); if (revokeVO.getUserId() != null) { revoke.setUser(KEWServiceLocator.getUserService().getWorkflowUser(revokeVO.getUserId())); } if (revokeVO.getWorkgroupId() != null) { revoke.setWorkgroup(KEWServiceLocator.getWorkgroupService().getWorkgroup(revokeVO.getWorkgroupId())); } return revoke; } public static WorkflowAttributeValidationErrorDTO convertWorkflowAttributeValidationError(WorkflowAttributeValidationError error) { return new WorkflowAttributeValidationErrorDTO(error.getKey(), error.getMessage()); } // Method added for updating notes on server sites based on NoteVO change. Modfy on April 7, 2006 public static void updateNotes(RouteHeaderDTO routeHeaderVO, Long routeHeaderId) { NoteDTO[] notes = routeHeaderVO.getNotes(); NoteDTO[] notesToDelete = routeHeaderVO.getNotesToDelete(); Note noteToDelete = null; Note noteToSave = null; // Add or update notes to note table based on notes array in RouteHeaderVO if (notes != null) { for (int i = 0; i < notes.length; i++) { if (notes[i] != null) { noteToSave = new Note(); noteToSave.setNoteId(notes[i].getNoteId()); noteToSave.setRouteHeaderId(routeHeaderId); noteToSave.setNoteAuthorWorkflowId(notes[i].getNoteAuthorWorkflowId()); noteToSave.setNoteCreateDate(Utilities.convertCalendar(notes[i].getNoteCreateDate())); noteToSave.setNoteText(notes[i].getNoteText()); noteToSave.setLockVerNbr(notes[i].getLockVerNbr()); // if notes[i].getNoteId() == null, add note to note table, otherwise update note to note table getNoteService().saveNote(noteToSave); } } } // Delete notes from note table based on notesToDelete array in RouteHeaderVO if (notesToDelete != null) { for (int i = 0; i < notesToDelete.length; i++) { noteToDelete = getNoteService().getNoteByNoteId(notesToDelete[i].getNoteId()); if (noteToDelete != null) { getNoteService().deleteNote(noteToDelete); } } routeHeaderVO.setNotesToDelete(null); } } private static NoteService getNoteService() { return (NoteService) KEWServiceLocator.getService(KEWServiceLocator.NOTE_SERVICE); } private static NoteDTO[] convertNotesArrayListToNoteVOArray(List notesArrayList) { if (notesArrayList.size() > 0) { NoteDTO[] noteVOArray = new NoteDTO[notesArrayList.size()]; int i = 0; Note tempNote; NoteDTO tempNoteVO; for (Iterator it = notesArrayList.iterator(); it.hasNext();) { tempNote = (Note) it.next(); tempNoteVO = new NoteDTO(); tempNoteVO.setNoteId(tempNote.getNoteId()); tempNoteVO.setRouteHeaderId(tempNote.getRouteHeaderId()); tempNoteVO.setNoteAuthorWorkflowId(tempNote.getNoteAuthorWorkflowId()); tempNoteVO.setNoteCreateDate(Utilities.convertTimestamp(tempNote.getNoteCreateDate())); tempNoteVO.setNoteText(tempNote.getNoteText()); tempNoteVO.setLockVerNbr(tempNote.getLockVerNbr()); noteVOArray[i] = tempNoteVO; i++; } return noteVOArray; } else { return null; } } public static SimulationCriteria convertReportCriteriaDTO(ReportCriteriaDTO criteriaVO) throws KEWUserNotFoundException { if (criteriaVO == null) { return null; } SimulationCriteria criteria = new SimulationCriteria(); criteria.setDestinationNodeName(criteriaVO.getTargetNodeName()); criteria.setDocumentId(criteriaVO.getRouteHeaderId()); criteria.setDocumentTypeName(criteriaVO.getDocumentTypeName()); 
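// Copy the remaining simulation criteria: xml content, request activation flag, routing user,
// rule template names, node names, target users, and any simulated actions to take.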
criteria.setXmlContent(criteriaVO.getXmlContent()); criteria.setActivateRequests(criteriaVO.getActivateRequests()); if (criteriaVO.getRoutingUser() != null) { WorkflowUser user = KEWServiceLocator.getUserService().getWorkflowUser(criteriaVO.getRoutingUser()); if (user == null) { throw new KEWUserNotFoundException("Could not locate user for the given id: " + criteriaVO.getRoutingUser()); } criteria.setRoutingUser(user); } if (criteriaVO.getRuleTemplateNames() != null) { for (int index = 0; index < criteriaVO.getRuleTemplateNames().length; index++) { String ruleTemplateName = criteriaVO.getRuleTemplateNames()[index]; criteria.getRuleTemplateNames().add(ruleTemplateName); } } if (criteriaVO.getNodeNames() != null) { for (int i = 0; i < criteriaVO.getNodeNames().length; i++) { String nodeName = criteriaVO.getNodeNames()[i]; criteria.getNodeNames().add(nodeName); } } if (criteriaVO.getTargetUsers() != null) { for (int index = 0; index < criteriaVO.getTargetUsers().length; index++) { UserIdDTO userIdVO = criteriaVO.getTargetUsers()[index]; WorkflowUser user = KEWServiceLocator.getUserService().getWorkflowUser(userIdVO); if (user == null) { throw new KEWUserNotFoundException("Could not locate user for the given id: " + userIdVO); } criteria.getDestinationRecipients().add(user); } } if (criteriaVO.getActionsToTake() != null) { for (int index = 0; index < criteriaVO.getActionsToTake().length; index++) { ReportActionToTakeDTO actionToTakeVO = criteriaVO.getActionsToTake()[index]; criteria.getActionsToTake().add(convertReportActionToTakeVO(actionToTakeVO)); } } return criteria; } public static SimulationActionToTake convertReportActionToTakeVO(ReportActionToTakeDTO actionToTakeVO) throws KEWUserNotFoundException { if (actionToTakeVO == null) { return null; } SimulationActionToTake actionToTake = new SimulationActionToTake(); actionToTake.setNodeName(actionToTakeVO.getNodeName()); if (StringUtils.isBlank(actionToTakeVO.getActionToPerform())) { throw new IllegalArgumentException("ReportActionToTakeVO must contain an action taken code and does not"); } actionToTake.setActionToPerform(actionToTakeVO.getActionToPerform()); if (actionToTakeVO.getUserIdVO() == null) { throw new IllegalArgumentException("ReportActionToTakeVO must contain a userId and does not"); } WorkflowUser user = KEWServiceLocator.getUserService().getWorkflowUser(actionToTakeVO.getUserIdVO()); if (user == null) { throw new KEWUserNotFoundException("Could not locate user for the given id: " + actionToTakeVO.getUserIdVO()); } actionToTake.setUser(user); return actionToTake; } public static RuleDelegationDTO convertRuleDelegation(RuleDelegation ruleDelegation) throws WorkflowException { if (ruleDelegation == null) { return null; } RuleDelegationDTO ruleDelegationVO = new RuleDelegationDTO(); ruleDelegationVO.setDelegationType(ruleDelegation.getDelegationType()); ruleDelegationVO.setDelegationRule(convertRule(ruleDelegation.getDelegationRuleBaseValues())); return ruleDelegationVO; } // public static RuleDelegation convertRuleExtensionVO(RuleExtensionVO ruleExtensionVO) throws WorkflowException {} public static Collection<RuleExtensionDTO> convertRuleExtension(RuleExtension ruleExtension) throws WorkflowException { if (ruleExtension == null) { return null; } List<RuleExtensionDTO> extensionVOs = new ArrayList<RuleExtensionDTO>(); for (Iterator iter = ruleExtension.getExtensionValues().iterator(); iter.hasNext();) { RuleExtensionValue extensionValue = (RuleExtensionValue) iter.next(); extensionVOs.add(new 
RuleExtensionDTO(extensionValue.getKey(), extensionValue.getValue())); } return extensionVOs; } public static KeyValuePair convertRuleExtensionVO(RuleExtensionDTO ruleExtensionVO) throws WorkflowException { if (ruleExtensionVO == null) { return null; } return new KeyValuePair(ruleExtensionVO.getKey(), ruleExtensionVO.getValue()); } public static RuleResponsibilityDTO convertRuleResponsibility(RuleResponsibility ruleResponsibility) throws WorkflowException { if (ruleResponsibility == null) { return null; } RuleResponsibilityDTO ruleResponsibilityVO = new RuleResponsibilityDTO(); ruleResponsibilityVO.setActionRequestedCd(ruleResponsibility.getActionRequestedCd()); ruleResponsibilityVO.setApprovePolicy(ruleResponsibility.getApprovePolicy()); ruleResponsibilityVO.setPriority(ruleResponsibility.getPriority()); ruleResponsibilityVO.setResponsibilityId(ruleResponsibility.getResponsibilityId()); ruleResponsibilityVO.setRoleName(ruleResponsibility.getRole()); ruleResponsibilityVO.setUser(convertUser(ruleResponsibility.getWorkflowUser())); ruleResponsibilityVO.setWorkgroup(convertWorkgroup(ruleResponsibility.getWorkgroup())); for (Iterator iter = ruleResponsibility.getDelegationRules().iterator(); iter.hasNext();) { RuleDelegation ruleDelegation = (RuleDelegation) iter.next(); ruleResponsibilityVO.addDelegationRule(convertRuleDelegation(ruleDelegation)); } return ruleResponsibilityVO; } // public static KeyValuePair convertRuleResponsibilityVO(RuleResponsibilityVO ruleResponsibilityVO) throws // WorkflowException {} public static RuleDTO convertRule(RuleBaseValues ruleValues) throws WorkflowException { if (ruleValues == null) { return null; } RuleDTO rule = new RuleDTO(); rule.setActiveInd(ruleValues.getActiveInd()); rule.setDescription(ruleValues.getDescription()); rule.setDocTypeName(ruleValues.getDocTypeName()); rule.setFromDate(ruleValues.getFromDateString()); rule.setToDate(ruleValues.getToDateString()); rule.setIgnorePrevious(ruleValues.getIgnorePrevious()); rule.setRuleTemplateId(ruleValues.getRuleTemplateId()); rule.setRuleTemplateName(ruleValues.getRuleTemplateName()); // get keyPair values to setup RuleExtensionVOs for (Iterator iter = ruleValues.getRuleExtensions().iterator(); iter.hasNext();) { RuleExtension ruleExtension = (RuleExtension) iter.next(); rule.addRuleExtensions(convertRuleExtension(ruleExtension)); } // get keyPair values to setup RuleExtensionVOs for (Iterator iter = ruleValues.getResponsibilities().iterator(); iter.hasNext();) { RuleResponsibility ruleResponsibility = (RuleResponsibility) iter.next(); rule.addRuleResponsibility(convertRuleResponsibility(ruleResponsibility)); } return rule; } public static DocSearchCriteriaDTO convertDocumentSearchCriteriaDTO(DocumentSearchCriteriaDTO criteriaVO) throws WorkflowException { DocSearchCriteriaDTO criteria = new DocSearchCriteriaDTO(); criteria.setAppDocId(criteriaVO.getAppDocId()); criteria.setApprover(criteriaVO.getApprover()); criteria.setDocRouteStatus(criteriaVO.getDocRouteStatus()); criteria.setDocTitle(criteriaVO.getDocTitle()); criteria.setDocTypeFullName(criteriaVO.getDocTypeFullName()); criteria.setDocVersion(criteriaVO.getDocVersion()); criteria.setFromDateApproved(criteriaVO.getFromDateApproved()); criteria.setFromDateCreated(criteriaVO.getFromDateCreated()); criteria.setFromDateFinalized(criteriaVO.getFromDateFinalized()); criteria.setFromDateLastModified(criteriaVO.getFromDateLastModified()); criteria.setInitiator(criteriaVO.getInitiator()); criteria.setIsAdvancedSearch((criteriaVO.isAdvancedSearch()) ? 
DocSearchCriteriaDTO.ADVANCED_SEARCH_INDICATOR_STRING : "NO"); criteria.setSuperUserSearch((criteriaVO.isSuperUserSearch()) ? DocSearchCriteriaDTO.SUPER_USER_SEARCH_INDICATOR_STRING : "NO"); criteria.setRouteHeaderId(criteriaVO.getRouteHeaderId()); criteria.setViewer(criteriaVO.getViewer()); criteria.setWorkgroupViewerName(criteriaVO.getWorkgroupViewerName()); criteria.setToDateApproved(criteriaVO.getToDateApproved()); criteria.setToDateCreated(criteriaVO.getToDateCreated()); criteria.setToDateFinalized(criteriaVO.getToDateFinalized()); criteria.setToDateLastModified(criteriaVO.getToDateLastModified()); criteria.setThreshold(criteriaVO.getThreshold()); criteria.setSaveSearchForUser(criteriaVO.isSaveSearchForUser()); // generate the route node criteria if ( (StringUtils.isNotBlank(criteriaVO.getDocRouteNodeName())) && (StringUtils.isBlank(criteriaVO.getDocTypeFullName())) ) { throw new WorkflowException("No document type name specified when attempting to search by route node name '" + criteriaVO.getDocRouteNodeName() + "'"); } else if ( (StringUtils.isNotBlank(criteriaVO.getDocRouteNodeName())) && (StringUtils.isNotBlank(criteriaVO.getDocTypeFullName())) ) { criteria.setDocRouteNodeLogic(criteriaVO.getDocRouteNodeLogic()); List routeNodes = KEWServiceLocator.getRouteNodeService().getFlattenedNodes(getDocumentTypeByName(criteria.getDocTypeFullName()), true); boolean foundRouteNode = false; for (Iterator iterator = routeNodes.iterator(); iterator.hasNext();) { RouteNode routeNode = (RouteNode) iterator.next(); if (criteriaVO.getDocRouteNodeName().equals(routeNode.getRouteNodeName())) { foundRouteNode = true; break; } } if (!foundRouteNode) { throw new WorkflowException("Could not find route node name '" + criteriaVO.getDocRouteNodeName() + "' for document type name '" + criteriaVO.getDocTypeFullName() + "'"); } criteria.setDocRouteNodeId(criteriaVO.getDocRouteNodeName()); } // build a map of the search attributes passed in from the client creating lists where keys are duplicated HashMap<String, List<String>> searchAttributeValues = new HashMap<String,List<String>>(); for (KeyValueDTO keyValueVO : criteriaVO.getSearchAttributeValues()) { if (searchAttributeValues.containsKey(keyValueVO.getKey())) { searchAttributeValues.get(keyValueVO.getKey()).add(keyValueVO.getValue()); } else { searchAttributeValues.put(keyValueVO.getKey(), Arrays.asList(new String[]{keyValueVO.getValue()})); } } // build the list of SearchAttributeFormContainer objects List propertyFields = new ArrayList(); for (String key : searchAttributeValues.keySet()) { List<String> values = searchAttributeValues.get(key); SearchAttributeFormContainer container = null; if (values.size() == 1) { container = new SearchAttributeFormContainer(key, values.get(0)); } else if (values.size() > 1) { container = new SearchAttributeFormContainer(key, (String[])values.toArray()); } if (container != null) { propertyFields.add(container); } } DocSearchUtils.addSearchableAttributesToCriteria(criteria, propertyFields, true); return criteria; } private static DocumentType getDocumentTypeByName(String documentTypeName) { return KEWServiceLocator.getDocumentTypeService().findByName(documentTypeName); } public static DocumentSearchResultDTO convertDocumentSearchResultComponents(DocumentSearchResultComponents searchResult) throws WorkflowException { DocumentSearchResultDTO resultsVO = new DocumentSearchResultDTO(); resultsVO.setColumns(convertColumns(searchResult.getColumns())); 
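// Convert each result row into a transport DTO; the column metadata was converted just above.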
        resultsVO.setSearchResults(convertDocumentSearchResults(searchResult.getSearchResults()));
        return resultsVO;
    }

    private static List<DocumentSearchResultRowDTO> convertDocumentSearchResults(List<DocumentSearchResult> searchResults) throws WorkflowException {
        List<DocumentSearchResultRowDTO> rowVOs = new ArrayList<DocumentSearchResultRowDTO>();
        for (DocumentSearchResult documentSearchResult : searchResults) {
            rowVOs.add(convertDocumentSearchResult(documentSearchResult));
        }
        return rowVOs;
    }

    public static DocumentSearchResultRowDTO convertDocumentSearchResult(DocumentSearchResult resultRow) throws WorkflowException {
        DocumentSearchResultRowDTO rowVO = new DocumentSearchResultRowDTO();
        List<KeyValueDTO> fieldValues = new ArrayList<KeyValueDTO>();
        for (KeyValueSort keyValueSort : resultRow.getResultContainers()) {
            fieldValues.add(new KeyValueDTO(keyValueSort.getKey(), keyValueSort.getValue(), keyValueSort.getUserDisplayValue()));
        }
        rowVO.setFieldValues(fieldValues);
        return rowVO;
    }

    private static List<LookupableColumnDTO> convertColumns(List<Column> columns) throws WorkflowException {
        List<LookupableColumnDTO> columnVOs = new ArrayList<LookupableColumnDTO>();
        for (Column column : columns) {
            columnVOs.add(convertColumn(column));
        }
        return columnVOs;
    }

    public static LookupableColumnDTO convertColumn(Column column) throws WorkflowException {
        LookupableColumnDTO columnVO = new LookupableColumnDTO();
        columnVO.setColumnTitle(column.getColumnTitle());
        columnVO.setKey(column.getKey());
        columnVO.setPropertyName(column.getPropertyName());
        columnVO.setSortable(column.isSortable());
        columnVO.setSortPropertyName(column.getSortPropertyName());
        columnVO.setType(column.getType());
        List<KeyValueDTO> displayParameters = new ArrayList<KeyValueDTO>();
        for (String key : column.getDisplayParameters().keySet()) {
            displayParameters.add(new KeyValueDTO(key, column.getDisplayParameters().get(key)));
        }
        columnVO.setDisplayParameters(displayParameters);
        // return the populated column DTO so callers receive the converted column data
        return columnVO;
    }

    // public static RuleBaseValues convertRuleVO(RuleVO ruleVO) throws WorkflowException {}

    private static void handleException(String message, Exception e) throws WorkflowException {
        if (e instanceof RuntimeException) {
            throw (RuntimeException) e;
        } else if (e instanceof WorkflowException) {
            throw (WorkflowException) e;
        }
        throw new WorkflowException(message, e);
    }
}
impl/src/main/java/org/kuali/rice/kew/dto/DTOConverter.java
/* * Copyright 2005-2006 The Kuali Foundation. * * * Licensed under the Educational Community License, Version 1.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.kew.dto; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import org.kuali.rice.core.reflect.DataDefinition; import org.kuali.rice.core.reflect.ObjectDefinition; import org.kuali.rice.core.reflect.PropertyDefinition; import org.kuali.rice.core.resourceloader.GlobalResourceLoader; import org.kuali.rice.kew.actionitem.ActionItem; import org.kuali.rice.kew.actionrequest.ActionRequestFactory; import org.kuali.rice.kew.actionrequest.ActionRequestValue; import org.kuali.rice.kew.actions.AdHocRevoke; import org.kuali.rice.kew.actions.MovePoint; import org.kuali.rice.kew.actions.ValidActions; import org.kuali.rice.kew.actiontaken.ActionTakenValue; import org.kuali.rice.kew.definition.AttributeDefinition; import org.kuali.rice.kew.docsearch.DocSearchCriteriaDTO; import org.kuali.rice.kew.docsearch.DocSearchUtils; import org.kuali.rice.kew.docsearch.DocumentSearchContext; import org.kuali.rice.kew.docsearch.DocumentSearchResult; import org.kuali.rice.kew.docsearch.DocumentSearchResultComponents; import org.kuali.rice.kew.docsearch.SearchableAttribute; import org.kuali.rice.kew.docsearch.web.SearchAttributeFormContainer; import org.kuali.rice.kew.docsearch.xml.GenericXMLSearchableAttribute; import org.kuali.rice.kew.doctype.bo.DocumentType; import org.kuali.rice.kew.engine.CompatUtils; import org.kuali.rice.kew.engine.node.BranchState; import org.kuali.rice.kew.engine.node.KeyValuePair; import org.kuali.rice.kew.engine.node.Process; import org.kuali.rice.kew.engine.node.RouteNode; import org.kuali.rice.kew.engine.node.RouteNodeInstance; import org.kuali.rice.kew.engine.node.State; import org.kuali.rice.kew.engine.simulation.SimulationActionToTake; import org.kuali.rice.kew.engine.simulation.SimulationCriteria; import org.kuali.rice.kew.exception.DocumentTypeNotFoundException; import org.kuali.rice.kew.exception.KEWUserNotFoundException; import org.kuali.rice.kew.exception.WorkflowException; import org.kuali.rice.kew.exception.WorkflowRuntimeException; import org.kuali.rice.kew.lookupable.Column; import org.kuali.rice.kew.notes.Note; import org.kuali.rice.kew.notes.service.NoteService; import org.kuali.rice.kew.postprocessor.ActionTakenEvent; import org.kuali.rice.kew.postprocessor.AfterProcessEvent; import org.kuali.rice.kew.postprocessor.BeforeProcessEvent; import org.kuali.rice.kew.postprocessor.DeleteEvent; import org.kuali.rice.kew.postprocessor.DocumentRouteLevelChange; import org.kuali.rice.kew.postprocessor.DocumentRouteStatusChange; import org.kuali.rice.kew.routeheader.DocumentContent; import 
org.kuali.rice.kew.routeheader.DocumentRouteHeaderValue; import org.kuali.rice.kew.routeheader.StandardDocumentContent; import org.kuali.rice.kew.rule.RuleBaseValues; import org.kuali.rice.kew.rule.RuleDelegation; import org.kuali.rice.kew.rule.RuleExtension; import org.kuali.rice.kew.rule.RuleExtensionValue; import org.kuali.rice.kew.rule.RuleResponsibility; import org.kuali.rice.kew.rule.WorkflowAttribute; import org.kuali.rice.kew.rule.WorkflowAttributeValidationError; import org.kuali.rice.kew.rule.WorkflowAttributeXmlValidator; import org.kuali.rice.kew.rule.bo.RuleAttribute; import org.kuali.rice.kew.rule.xmlrouting.GenericXMLRuleAttribute; import org.kuali.rice.kew.service.KEWServiceLocator; import org.kuali.rice.kew.user.AuthenticationUserId; import org.kuali.rice.kew.user.EmplId; import org.kuali.rice.kew.user.Recipient; import org.kuali.rice.kew.user.RoleRecipient; import org.kuali.rice.kew.user.UserId; import org.kuali.rice.kew.user.UuId; import org.kuali.rice.kew.user.WorkflowUser; import org.kuali.rice.kew.user.WorkflowUserId; import org.kuali.rice.kew.util.KEWConstants; import org.kuali.rice.kew.util.ResponsibleParty; import org.kuali.rice.kew.util.Utilities; import org.kuali.rice.kew.util.XmlHelper; import org.kuali.rice.kew.web.KeyValueSort; import org.kuali.rice.kew.workgroup.GroupId; import org.kuali.rice.kew.workgroup.GroupNameId; import org.kuali.rice.kew.workgroup.WorkflowGroupId; import org.kuali.rice.kew.workgroup.Workgroup; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; /** * Translates Workflow server side beans into client side VO beans. * * @author Kuali Rice Team ([email protected]) */ public class DTOConverter { private static final Logger LOG = Logger.getLogger(DTOConverter.class); public static RouteHeaderDTO convertRouteHeader(DocumentRouteHeaderValue routeHeader, WorkflowUser user) throws WorkflowException, KEWUserNotFoundException { RouteHeaderDTO routeHeaderVO = new RouteHeaderDTO(); if (routeHeader == null) { return null; } populateRouteHeaderVO(routeHeaderVO, routeHeader); if (user != null) { routeHeaderVO.setUserBlanketApprover(false); // default to false if (routeHeader.getDocumentType() != null) { routeHeaderVO.setUserBlanketApprover(routeHeader.getDocumentType().isUserBlanketApprover(user)); } String topActionRequested = KEWConstants.ACTION_REQUEST_FYI_REQ; for (Iterator iter = routeHeader.getActionRequests().iterator(); iter.hasNext();) { ActionRequestValue actionRequest = (ActionRequestValue) iter.next(); // below will control what buttons are drawn on the client we only want the // heaviest action button to show on the client making this code a little combersome if (actionRequest.isRecipientRoutedRequest(user) && actionRequest.isActive()) { int actionRequestComparison = ActionRequestValue.compareActionCode(actionRequest.getActionRequested(), topActionRequested); if (actionRequest.isFYIRequest() && actionRequestComparison >= 0) { routeHeaderVO.setFyiRequested(true); } else if (actionRequest.isAcknowledgeRequest() && actionRequestComparison >= 0) { routeHeaderVO.setAckRequested(true); routeHeaderVO.setFyiRequested(false); topActionRequested = actionRequest.getActionRequested(); } else if (actionRequest.isApproveRequest() && actionRequestComparison >= 0) { routeHeaderVO.setApproveRequested(true); routeHeaderVO.setAckRequested(false); routeHeaderVO.setFyiRequested(false); topActionRequested = actionRequest.getActionRequested(); if (actionRequest.isCompleteRequst()) { routeHeaderVO.setCompleteRequested(true); } } 
} } // Update notes and notesToDelete arrays in routeHeaderVO routeHeaderVO.setNotesToDelete(null); routeHeaderVO.setNotes(convertNotesArrayListToNoteVOArray(routeHeader.getNotes())); } if (user != null) { routeHeaderVO.setValidActions(convertValidActions(KEWServiceLocator.getActionRegistry().getValidActions(user, routeHeader))); } return routeHeaderVO; } public static RouteHeaderDTO convertActionListRouteHeader(DocumentRouteHeaderValue routeHeader, WorkflowUser user) throws WorkflowException, KEWUserNotFoundException { RouteHeaderDTO routeHeaderVO = new RouteHeaderDTO(); if (routeHeader == null) { return null; } populateRouteHeaderVO(routeHeaderVO, routeHeader); if (user != null) { routeHeaderVO.setUserBlanketApprover(false); // default to false if (routeHeader.getDocumentType() != null) { routeHeaderVO.setUserBlanketApprover(routeHeader.getDocumentType().isUserBlanketApprover(user)); } String topActionRequested = KEWConstants.ACTION_REQUEST_FYI_REQ; for (Iterator iter = routeHeader.getActionRequests().iterator(); iter.hasNext();) { ActionRequestValue actionRequest = (ActionRequestValue) iter.next(); // below will control what buttons are drawn on the client we only want the // heaviest action button to show on the client making this code a little combersome if (actionRequest.isRecipientRoutedRequest(user) && actionRequest.isActive()) { int actionRequestComparison = ActionRequestValue.compareActionCode(actionRequest.getActionRequested(), topActionRequested); if (actionRequest.isFYIRequest() && actionRequestComparison >= 0) { routeHeaderVO.setFyiRequested(true); } else if (actionRequest.isAcknowledgeRequest() && actionRequestComparison >= 0) { routeHeaderVO.setAckRequested(true); routeHeaderVO.setFyiRequested(false); topActionRequested = actionRequest.getActionRequested(); } else if (actionRequest.isApproveRequest() && actionRequestComparison >= 0) { routeHeaderVO.setApproveRequested(true); routeHeaderVO.setAckRequested(false); routeHeaderVO.setFyiRequested(false); topActionRequested = actionRequest.getActionRequested(); if (actionRequest.isCompleteRequst()) { routeHeaderVO.setCompleteRequested(true); } } } } } routeHeaderVO.setValidActions(convertValidActions(KEWServiceLocator.getActionRegistry().getValidActions(user, routeHeader))); return routeHeaderVO; } public static ValidActionsDTO convertValidActions(ValidActions validActions) { ValidActionsDTO validActionsVO = new ValidActionsDTO(); for (Iterator iter = validActions.getActionTakenCodes().iterator(); iter.hasNext();) { String actionTakenCode = (String) iter.next(); validActionsVO.addValidActionsAllowed(actionTakenCode); } return validActionsVO; } private static void populateRouteHeaderVO(RouteHeaderDTO routeHeaderVO, DocumentRouteHeaderValue routeHeader) throws WorkflowException { routeHeaderVO.setRouteHeaderId(routeHeader.getRouteHeaderId()); routeHeaderVO.setAppDocId(routeHeader.getAppDocId()); routeHeaderVO.setDateApproved(Utilities.convertTimestamp(routeHeader.getApprovedDate())); routeHeaderVO.setDateCreated(Utilities.convertTimestamp(routeHeader.getCreateDate())); routeHeaderVO.setDateFinalized(Utilities.convertTimestamp(routeHeader.getFinalizedDate())); routeHeaderVO.setDateLastModified(Utilities.convertTimestamp(routeHeader.getStatusModDate())); /** * This is the original code which set everything up for lazy loading of document content */ // by default, a non-initialized document content object will be sent so that it can be fetched lazily // DocumentContentVO documentContentVO = new DocumentContentVO(); // 
documentContentVO.setRouteHeaderId(routeHeader.getRouteHeaderId()); // routeHeaderVO.setDocumentContent(documentContentVO); /** * Since we removed the lazy loading in the 2.3 release, this is the code which bypasses lazy loading */ // routeHeaderVO.setDocumentContent(convertDocumentContent(routeHeader.getDocContent(), // routeHeader.getRouteHeaderId())); routeHeaderVO.setDocRouteLevel(routeHeader.getDocRouteLevel()); routeHeaderVO.setCurrentRouteNodeNames(routeHeader.getCurrentRouteLevelName()); /* * Collection activeNodes = * SpringServiceLocator.getRouteNodeService().getActiveNodeInstances(routeHeaderVO.getRouteHeaderId()); * routeHeaderVO.setNodeNames(new String[activeNodes.size()]); int index = 0; for (Iterator iterator = * activeNodes.iterator(); iterator.hasNext();) { RouteNodeInstance nodeInstance = (RouteNodeInstance) * iterator.next(); routeHeaderVO.getNodeNames()[index++] = nodeInstance.getRouteNode().getRouteNodeName(); } */ routeHeaderVO.setDocRouteStatus(routeHeader.getDocRouteStatus()); routeHeaderVO.setDocTitle(routeHeader.getDocTitle()); if (routeHeader.getDocumentType() != null) { routeHeaderVO.setDocTypeName(routeHeader.getDocumentType().getName()); routeHeaderVO.setDocumentUrl(routeHeader.getDocumentType().getDocHandlerUrl()); routeHeaderVO.setDocTypeId(routeHeader.getDocumentTypeId()); } routeHeaderVO.setDocVersion(routeHeader.getDocVersion()); routeHeaderVO.setInitiator(convertUser(routeHeader.getInitiatorUser())); routeHeaderVO.setRoutedByUser(convertUser(routeHeader.getRoutedByUser())); /* populate the routeHeaderVO with the document variables */ // FIXME: we assume there is only one for now RouteNodeInstance routeNodeInstance = (RouteNodeInstance) routeHeader.getInitialRouteNodeInstance(0); // Ok, we are using the "branch state" as the arbitrary convenient repository for flow/process/edoc variables // so we need to stuff them into the VO if (routeNodeInstance.getBranch() != null) { List listOfBranchStates = routeNodeInstance.getBranch().getBranchState(); Iterator it = listOfBranchStates.iterator(); while (it.hasNext()) { BranchState bs = (BranchState) it.next(); if (bs.getKey() != null && bs.getKey().startsWith(BranchState.VARIABLE_PREFIX)) { LOG.debug("Setting branch state variable on vo: " + bs.getKey() + "=" + bs.getValue()); routeHeaderVO.setVariable(bs.getKey().substring(BranchState.VARIABLE_PREFIX.length()), bs.getValue()); } } } } public static DocumentRouteHeaderValue convertRouteHeaderVO(RouteHeaderDTO routeHeaderVO) throws WorkflowException, KEWUserNotFoundException { DocumentRouteHeaderValue routeHeader = new DocumentRouteHeaderValue(); routeHeader.setAppDocId(routeHeaderVO.getAppDocId()); routeHeader.setApprovedDate(Utilities.convertCalendar(routeHeaderVO.getDateApproved())); routeHeader.setCreateDate(Utilities.convertCalendar(routeHeaderVO.getDateCreated())); // String updatedDocumentContent = buildUpdatedDocumentContent(routeHeaderVO); // if null is returned from this method it indicates that the document content on the route header // contained no changes, since we are creating a new document here, we will default the // document content approriately if no changes are detected on the incoming DocumentContentVO // if (updatedDocumentContent != null) { // routeHeader.setDocContent(updatedDocumentContent); // } else { // routeHeader.setDocContent(KEWConstants.DEFAULT_DOCUMENT_CONTENT); // } if (StringUtils.isEmpty(routeHeader.getDocContent())) { routeHeader.setDocContent(KEWConstants.DEFAULT_DOCUMENT_CONTENT); } 
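// Descriptive note: with the content-diffing call above commented out, a route header built from a DTO
// that carries no document content falls back to KEWConstants.DEFAULT_DOCUMENT_CONTENT instead of
// staying empty.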
routeHeader.setDocRouteLevel(routeHeaderVO.getDocRouteLevel()); routeHeader.setDocRouteStatus(routeHeaderVO.getDocRouteStatus()); routeHeader.setDocTitle(routeHeaderVO.getDocTitle()); if (routeHeaderVO.getDocTypeName() != null) { DocumentType documentType = KEWServiceLocator.getDocumentTypeService().findByName(routeHeaderVO.getDocTypeName()); if (documentType == null) { throw new DocumentTypeNotFoundException("Could not locate the given document type name: " + routeHeaderVO.getDocTypeName()); } routeHeader.setDocumentTypeId(documentType.getDocumentTypeId()); } routeHeader.setDocVersion(routeHeaderVO.getDocVersion()); routeHeader.setFinalizedDate(Utilities.convertCalendar(routeHeaderVO.getDateFinalized())); if (routeHeaderVO.getInitiator() != null) { routeHeader.setInitiatorWorkflowId(routeHeaderVO.getInitiator().getWorkflowId()); } if (routeHeaderVO.getRoutedByUser() != null) { routeHeader.setRoutedByUserWorkflowId(routeHeaderVO.getRoutedByUser().getWorkflowId()); } routeHeader.setRouteHeaderId(routeHeaderVO.getRouteHeaderId()); routeHeader.setStatusModDate(Utilities.convertCalendar(routeHeaderVO.getDateLastModified())); return routeHeader; } public static ActionItemDTO convertActionItem(ActionItem actionItem) throws KEWUserNotFoundException { ActionItemDTO actionItemVO = new ActionItemDTO(); actionItemVO.setActionItemId(actionItem.getActionItemId()); actionItemVO.setActionItemIndex(actionItem.getActionItemIndex()); actionItemVO.setActionRequestCd(actionItem.getActionRequestCd()); actionItemVO.setActionRequestId(actionItem.getActionRequestId()); actionItemVO.setActionToTake(actionItem.getActionToTake()); actionItemVO.setDateAssigned(actionItem.getDateAssigned()); actionItemVO.setDateAssignedString(actionItem.getDateAssignedString()); actionItemVO.setDelegationType(actionItem.getDelegationType()); actionItemVO.setDelegatorWorkflowId(actionItem.getDelegatorWorkflowId()); if (StringUtils.isNotEmpty(actionItem.getDelegatorWorkflowId())) { actionItemVO.setDelegatorUser(convertUser(actionItem.getDelegatorUser())); } actionItemVO.setDelegatorWorkgroupId(actionItem.getDelegatorWorkgroupId()); if (actionItem.getDelegatorWorkgroupId() != null) { actionItemVO.setDelegatorWorkgroup(convertWorkgroup(actionItem.getDelegatorWorkgroup())); } actionItemVO.setDocHandlerURL(actionItem.getDocHandlerURL()); actionItemVO.setDocLabel(actionItem.getDocLabel()); actionItemVO.setDocName(actionItem.getDocName()); actionItemVO.setDocTitle(actionItem.getDocTitle()); actionItemVO.setResponsibilityId(actionItem.getResponsibilityId()); actionItemVO.setRoleName(actionItem.getRoleName()); actionItemVO.setRouteHeaderId(actionItem.getRouteHeaderId()); actionItemVO.setWorkflowId(actionItem.getWorkflowId()); if (StringUtils.isNotEmpty(actionItem.getWorkflowId())) { actionItemVO.setUser(convertUser(actionItem.getUser())); } actionItemVO.setWorkgroupId(actionItem.getWorkgroupId()); if (actionItem.getWorkgroupId() != null) { actionItemVO.setWorkgroup(convertWorkgroup(actionItem.getWorkgroup())); } return actionItemVO; } /** * Converts the given DocumentContentVO to a document content string. This method considers existing content on the * document and updates approriately. The string returned will be the new document content for the document. If null is * returned, then the document content is unchanged. 
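 * <p>
 * A minimal usage sketch (illustrative only; the variable names and XML payload are assumptions, not part of this API):
 * <pre>
 * DocumentContentDTO contentVO = new DocumentContentDTO();
 * contentVO.setRouteHeaderId(existingDocumentId);          // null here means "new document", so default content is the base
 * contentVO.setApplicationContent(updatedApplicationXml);  // non-empty value replaces the application content section
 * String mergedDocContent = DTOConverter.buildUpdatedDocumentContent(contentVO); // may throw WorkflowException
 * </pre>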
*/ public static String buildUpdatedDocumentContent(DocumentContentDTO documentContentVO) throws WorkflowException { DocumentType documentType = null; String documentContent = KEWConstants.DEFAULT_DOCUMENT_CONTENT; try { // parse the existing content on the document String existingDocContent = KEWConstants.DEFAULT_DOCUMENT_CONTENT; if (documentContentVO.getRouteHeaderId() != null) { DocumentRouteHeaderValue document = KEWServiceLocator.getRouteHeaderService().getRouteHeader(documentContentVO.getRouteHeaderId()); documentType = document.getDocumentType(); existingDocContent = document.getDocContent(); } StandardDocumentContent standardDocContent = new StandardDocumentContent(existingDocContent); DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Document document = builder.newDocument(); Element root = document.createElement(KEWConstants.DOCUMENT_CONTENT_ELEMENT); document.appendChild(root); Element applicationContentElement = standardDocContent.getApplicationContent(); if (documentContentVO.getApplicationContent() != null) { // application content has changed if (!Utilities.isEmpty(documentContentVO.getApplicationContent())) { applicationContentElement = document.createElement(KEWConstants.APPLICATION_CONTENT_ELEMENT); XmlHelper.appendXml(applicationContentElement, documentContentVO.getApplicationContent()); } else { // they've cleared the application content applicationContentElement = null; } } Element attributeContentElement = createDocumentContentSection(document, standardDocContent.getAttributeContent(), documentContentVO.getAttributeDefinitions(), documentContentVO.getAttributeContent(), KEWConstants.ATTRIBUTE_CONTENT_ELEMENT, documentType); Element searchableContentElement = createDocumentContentSection(document, standardDocContent.getSearchableContent(), documentContentVO.getSearchableDefinitions(), documentContentVO.getSearchableContent(), KEWConstants.SEARCHABLE_CONTENT_ELEMENT, documentType); if (applicationContentElement != null) { root.appendChild(applicationContentElement); } if (attributeContentElement != null) { root.appendChild(attributeContentElement); } if (searchableContentElement != null) { root.appendChild(searchableContentElement); } documentContent = XmlHelper.writeNode(document); } catch (Exception e) { handleException("Error parsing document content.", e); } return documentContent; } private static Element createDocumentContentSection(Document document, Element existingAttributeElement, WorkflowAttributeDefinitionDTO[] definitions, String content, String elementName, DocumentType documentType) throws Exception { Element contentSectionElement = existingAttributeElement; // if they've updated the content, we're going to re-build the content section element from scratch if (content != null) { if (!Utilities.isEmpty(content)) { contentSectionElement = document.createElement(elementName); // if they didn't merely clear the content, let's build the content section element by combining the children // of the incoming XML content Element incomingAttributeElement = XmlHelper.readXml(content).getDocumentElement(); NodeList children = incomingAttributeElement.getChildNodes(); for (int index = 0; index < children.getLength(); index++) { contentSectionElement.appendChild(document.importNode(children.item(index), true)); } } else { contentSectionElement = null; } } // if they have new definitions we're going to append those to the existing content section if (!Utilities.isEmpty(definitions)) { String errorMessage = ""; boolean inError = false; 
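// Descriptive note: the loop below instantiates each WorkflowAttributeDefinitionDTO's attribute object,
// copies client-supplied properties into the parameter map of XML rule/searchable attributes, runs the
// optional WorkflowAttributeXmlValidator check on the client data, and appends each attribute's generated
// XML to the content section; any validation failure is collected and surfaced as a WorkflowRuntimeException.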
if (contentSectionElement == null) { contentSectionElement = document.createElement(elementName); } for (int index = 0; index < definitions.length; index++) { WorkflowAttributeDefinitionDTO definitionVO = definitions[index]; AttributeDefinition definition = convertWorkflowAttributeDefinitionVO(definitionVO, documentType); RuleAttribute ruleAttribute = definition.getRuleAttribute(); Object attribute = GlobalResourceLoader.getResourceLoader().getObject(definition.getObjectDefinition()); boolean propertiesAsMap = false; if (KEWConstants.RULE_XML_ATTRIBUTE_TYPE.equals(ruleAttribute.getType())) { ((GenericXMLRuleAttribute) attribute).setRuleAttribute(ruleAttribute); propertiesAsMap = true; } else if (KEWConstants.SEARCHABLE_XML_ATTRIBUTE_TYPE.equals(ruleAttribute.getType())) { ((GenericXMLSearchableAttribute) attribute).setRuleAttribute(ruleAttribute); propertiesAsMap = true; } if (propertiesAsMap) { for (PropertyDefinitionDTO propertyDefinitionVO : definitionVO.getProperties()) { if (attribute instanceof GenericXMLRuleAttribute) { ((GenericXMLRuleAttribute) attribute).getParamMap().put(propertyDefinitionVO.getName(), propertyDefinitionVO.getValue()); } else if (attribute instanceof GenericXMLSearchableAttribute) { ((GenericXMLSearchableAttribute) attribute).getParamMap().put(propertyDefinitionVO.getName(), propertyDefinitionVO.getValue()); } } } // validate inputs from client application if the attribute is capable if (attribute instanceof WorkflowAttributeXmlValidator) { List errors = ((WorkflowAttributeXmlValidator) attribute).validateClientRoutingData(); if (!errors.isEmpty()) { inError = true; errorMessage += "Error validating attribute " + definitions[index].getAttributeName() + " "; for (Iterator iter = errors.iterator(); iter.hasNext();) { WorkflowAttributeValidationError error = (WorkflowAttributeValidationError) iter.next(); errorMessage += error.getMessage() + " "; } } } // dont add to xml if attribute is in error if (!inError) { if (attribute instanceof WorkflowAttribute) { String attributeDocContent = ((WorkflowAttribute) attribute).getDocContent(); if (!StringUtils.isEmpty(attributeDocContent)) { XmlHelper.appendXml(contentSectionElement, attributeDocContent); } } else if (attribute instanceof SearchableAttribute) { String searcheAttributeContent = ((SearchableAttribute) attribute).getSearchContent(DocSearchUtils.getDocumentSearchContext("", documentType.getName(), "")); if (!StringUtils.isEmpty(searcheAttributeContent)) { XmlHelper.appendXml(contentSectionElement, searcheAttributeContent); } } } } if (inError) { throw new WorkflowRuntimeException(errorMessage); } } if (contentSectionElement != null) { // always be sure and import the element into the new document, if it originated from the existing doc content // and // appended to it, it will need to be imported contentSectionElement = (Element) document.importNode(contentSectionElement, true); } return contentSectionElement; } public static DocumentContentDTO convertDocumentContent(String documentContentValue, Long documentId) throws WorkflowException { if (documentContentValue == null) { return null; } DocumentContentDTO documentContentVO = new DocumentContentDTO(); // initialize the content fields documentContentVO.setApplicationContent(""); documentContentVO.setAttributeContent(""); documentContentVO.setSearchableContent(""); documentContentVO.setRouteHeaderId(documentId); try { DocumentContent documentContent = new StandardDocumentContent(documentContentValue); if (documentContent.getApplicationContent() != null) { 
documentContentVO.setApplicationContent(XmlHelper.writeNode(documentContent.getApplicationContent())); } if (documentContent.getAttributeContent() != null) { documentContentVO.setAttributeContent(XmlHelper.writeNode(documentContent.getAttributeContent())); } if (documentContent.getSearchableContent() != null) { documentContentVO.setSearchableContent(XmlHelper.writeNode(documentContent.getSearchableContent())); } } catch (Exception e) { handleException("Error parsing document content.", e); } return documentContentVO; } public static WorkgroupDTO convertWorkgroup(Workgroup workgroup) { if (workgroup == null) { return null; } WorkgroupDTO workgroupVO = new WorkgroupDTO(); workgroupVO.setActiveInd(workgroup.getActiveInd().booleanValue()); workgroupVO.setDescription(workgroup.getDescription()); workgroupVO.setWorkgroupId(workgroup.getWorkflowGroupId().getGroupId()); workgroupVO.setWorkgroupName(workgroup.getGroupNameId().getNameId()); workgroupVO.setWorkgroupType(workgroup.getWorkgroupType()); if (workgroup.getUsers() != null) { workgroupVO.setMembers(new UserDTO[workgroup.getUsers().size()]); int index = 0; for (Iterator iterator = workgroup.getUsers().iterator(); iterator.hasNext(); index++) { WorkflowUser user = (WorkflowUser) iterator.next(); workgroupVO.getMembers()[index] = convertUser(user); } } return workgroupVO; } public static UserDTO convertUser(WorkflowUser user) { if (user == null) { return null; } UserDTO userVO = new UserDTO(); userVO.setNetworkId(user.getAuthenticationUserId() == null ? null : user.getAuthenticationUserId().getAuthenticationId()); userVO.setUuId(user.getUuId() == null ? null : user.getUuId().getUuId()); userVO.setEmplId(user.getEmplId() == null ? null : user.getEmplId().getEmplId()); userVO.setWorkflowId(user.getWorkflowUserId() == null ? 
null : user.getWorkflowUserId().getWorkflowId()); userVO.setDisplayName(user.getDisplayName()); userVO.setLastName(user.getLastName()); userVO.setFirstName(user.getGivenName()); userVO.setEmailAddress(user.getEmailAddress()); // Preferences preferences = SpringServiceLocator.getPreferencesService().getPreferences(user); // userVO.setUserPreferencePopDocHandler(KEWConstants.PREFERENCES_YES_VAL.equals(preferences.getOpenNewWindow())); userVO.setUserPreferencePopDocHandler(true); return userVO; } public static WorkflowUser convertUserVO(UserDTO userVO) throws KEWUserNotFoundException { if (userVO == null) { return null; } UserId userId = null; if (userVO.getWorkflowId() != null) { userId = new WorkflowUserId(userVO.getWorkflowId()); } else if (userVO.getNetworkId() != null) { userId = new AuthenticationUserId(userVO.getNetworkId()); } else if (userVO.getEmplId() != null) { userId = new EmplId(userVO.getEmplId()); } else if (userVO.getUuId() != null) { userId = new UuId(userVO.getUuId()); } else { throw new KEWUserNotFoundException("Cannot convert the given UserVO, it does not contain any valid user ids."); } return KEWServiceLocator.getUserService().getWorkflowUser(userId); } public static DocumentTypeDTO convertDocumentType(DocumentType docType) { DocumentTypeDTO docTypeVO = new DocumentTypeDTO(); docTypeVO.setDocTypeParentId(docType.getDocTypeParentId()); if (docType.getParentDocType() != null) { docTypeVO.setDocTypeParentName(docType.getParentDocType().getName()); } docTypeVO.setDocTypeDescription(docType.getDescription()); docTypeVO.setDocTypeHandlerUrl(docType.getDocHandlerUrl()); docTypeVO.setDocTypeId(docType.getDocumentTypeId()); docTypeVO.setDocTypeLabel(docType.getLabel()); docTypeVO.setName(docType.getName()); docTypeVO.setDocTypeVersion(docType.getVersion()); Boolean currentInd = docType.getCurrentInd(); if (currentInd == null) { docTypeVO.setDocTypeCurrentInd(null); } else if (currentInd.booleanValue()) { docTypeVO.setDocTypeCurrentInd(KEWConstants.ACTIVE_CD); } else { docTypeVO.setDocTypeCurrentInd(KEWConstants.INACTIVE_CD); } docTypeVO.setPostProcessorName(docType.getPostProcessorName()); docTypeVO.setDocTypeJndiFactoryClass(null); docTypeVO.setDocTypeActiveInd(docType.getActiveInd().booleanValue()); if (docType.getParentDocType() != null) { docTypeVO.setDocTypeActiveInherited(true); } else { docTypeVO.setDocTypeActiveInherited(false); } docTypeVO.setDocTypePreApprovalPolicy(docType.getPreApprovePolicy().getPolicyValue().booleanValue()); Workgroup blanketWorkgroup = docType.getBlanketApproveWorkgroup(); if (blanketWorkgroup != null) { docTypeVO.setBlanketApproveWorkgroupId(blanketWorkgroup.getWorkflowGroupId().getGroupId()); } docTypeVO.setBlanketApprovePolicy(docType.getBlanketApprovePolicy()); if (CompatUtils.isRouteLevelCompatible(docType)) { List nodes = CompatUtils.getRouteLevelCompatibleNodeList(docType); RouteTemplateEntryDTO[] templates = new RouteTemplateEntryDTO[nodes.size()]; int index = 0; for (Iterator iterator = nodes.iterator(); iterator.hasNext();) { RouteNode node = (RouteNode) iterator.next(); templates[index++] = convertRouteTemplateEntry(node); } docTypeVO.setRouteTemplates(templates); } docTypeVO.setRoutePath(convertRoutePath(docType)); return docTypeVO; } public static RouteTemplateEntryDTO convertRouteTemplateEntry(RouteNode node) { RouteTemplateEntryDTO entryVO = new RouteTemplateEntryDTO(); entryVO.setFinalApprover(node.getFinalApprovalInd().booleanValue()); entryVO.setMandatoryRoute(node.getMandatoryRouteInd().booleanValue()); 
entryVO.setRouteLevel(CompatUtils.getLevelForNode(node.getDocumentType(), node.getRouteNodeName())); entryVO.setRouteLevelName(node.getRouteNodeName()); entryVO.setRouteMethodName(node.getRouteMethodName()); entryVO.setDocTypeId(node.getDocumentTypeId()); entryVO.setExceptionWorkgroupId(node.getExceptionWorkgroupId()); entryVO.setJrf_ver_nbr(node.getLockVerNbr()); entryVO.setMandatoryRoute(node.getMandatoryRouteInd().toString()); return entryVO; } public static RoutePathDTO convertRoutePath(DocumentType documentType) { RoutePathDTO routePath = new RoutePathDTO(); ProcessDTO[] processes = new ProcessDTO[documentType.getProcesses().size()]; int index = 0; for (Iterator iterator = documentType.getProcesses().iterator(); iterator.hasNext();) { Process process = (Process) iterator.next(); processes[index++] = convertProcess(process); } routePath.setProcesses(processes); return routePath; } public static ActionRequestDTO convertActionRequest(ActionRequestValue actionRequest) throws KEWUserNotFoundException { // TODO some newly added actionrequest properties are not here (delegation stuff) ActionRequestDTO actionRequestVO = new ActionRequestDTO(); actionRequestVO.setActionRequested(actionRequest.getActionRequested()); actionRequestVO.setActionRequestId(actionRequest.getActionRequestId()); if (actionRequest.getActionTaken() != null) { actionRequestVO.setActionTakenId(actionRequest.getActionTakenId()); actionRequestVO.setActionTaken(convertActionTaken(actionRequest.getActionTaken())); } actionRequestVO.setAnnotation(actionRequest.getAnnotation()); actionRequestVO.setDateCreated(Utilities.convertTimestamp(actionRequest.getCreateDate())); actionRequestVO.setDocVersion(actionRequest.getDocVersion()); actionRequestVO.setUserDTO(convertUser(actionRequest.getWorkflowUser())); if (actionRequest.getWorkflowId() != null) { // TODO switch this to a user vo actionRequestVO.setEmplyId(actionRequest.getWorkflowUser().getEmplId().getEmplId()); } actionRequestVO.setIgnorePrevAction(actionRequest.getIgnorePrevAction()); actionRequestVO.setPriority(actionRequest.getPriority()); actionRequestVO.setRecipientTypeCd(actionRequest.getRecipientTypeCd()); actionRequestVO.setResponsibilityDesc(actionRequest.getResponsibilityDesc()); actionRequestVO.setResponsibilityId(actionRequest.getResponsibilityId()); actionRequestVO.setRouteHeaderId(actionRequest.getRouteHeaderId()); actionRequestVO.setRouteLevel(actionRequest.getRouteLevel()); actionRequestVO.setNodeName(actionRequest.getPotentialNodeName()); actionRequestVO.setNodeInstanceId((actionRequest.getNodeInstance() == null ? null : actionRequest.getNodeInstance().getRouteNodeInstanceId())); // actionRequestVO.setRouteMethodName(actionRequest.getRouteMethodName()); // TODO delyea - should below be using actionRequest.getRoleName()? 
actionRequestVO.setRoleName(actionRequest.getQualifiedRoleName()); actionRequestVO.setQualifiedRoleName(actionRequest.getQualifiedRoleName()); actionRequestVO.setQualifiedRoleNameLabel(actionRequest.getQualifiedRoleNameLabel()); actionRequestVO.setStatus(actionRequest.getStatus()); if (actionRequest.isWorkgroupRequest()) { actionRequestVO.setWorkgroupId(actionRequest.getWorkgroupId()); actionRequestVO.setWorkgroupDTO(convertWorkgroup(actionRequest.getWorkgroup())); } actionRequestVO.setParentActionRequestId(actionRequest.getParentActionRequestId()); ActionRequestDTO[] childRequestVOs = new ActionRequestDTO[actionRequest.getChildrenRequests().size()]; int index = 0; for (Iterator iterator = actionRequest.getChildrenRequests().iterator(); iterator.hasNext();) { ActionRequestValue childRequest = (ActionRequestValue) iterator.next(); ActionRequestDTO childRequestVO = convertActionRequest(childRequest); childRequestVO.setParentActionRequest(actionRequestVO); childRequestVOs[index++] = childRequestVO; } actionRequestVO.setChildrenRequests(childRequestVOs); return actionRequestVO; } public static ActionTakenDTO convertActionTaken(ActionTakenValue actionTaken) throws KEWUserNotFoundException { if (actionTaken == null) { return null; } ActionTakenDTO actionTakenVO = new ActionTakenDTO(); actionTakenVO.setActionDate(Utilities.convertTimestamp(actionTaken.getActionDate())); actionTakenVO.setActionTaken(actionTaken.getActionTaken()); actionTakenVO.setActionTakenId(actionTaken.getActionTakenId()); actionTakenVO.setAnnotation(actionTaken.getAnnotation()); actionTakenVO.setDocVersion(actionTaken.getDocVersion()); actionTakenVO.setRouteHeaderId(actionTaken.getRouteHeaderId()); WorkflowUser user = actionTaken.getWorkflowUser(); if (user != null) { actionTakenVO.setUserDTO(convertUser(user)); } WorkflowUser delegator = actionTaken.getDelegatorUser(); if (delegator != null) { actionTakenVO.setDelegatorDTO(convertUser(delegator)); } return actionTakenVO; } public static WorkgroupIdDTO convertGroupId(GroupId groupId) { WorkgroupIdDTO workgroupId = null; if (groupId instanceof GroupNameId) { GroupNameId groupName = (GroupNameId) groupId; workgroupId = new WorkgroupNameIdDTO(groupName.getNameId()); } else if (groupId instanceof WorkflowGroupId) { WorkflowGroupId workflowGroupId = (WorkflowGroupId) groupId; workgroupId = new WorkflowGroupIdDTO(workflowGroupId.getGroupId()); } return workgroupId; } public static GroupId convertWorkgroupIdVO(WorkgroupIdDTO workgroupId) { GroupId groupId = null; if (workgroupId instanceof WorkgroupNameIdDTO) { WorkgroupNameIdDTO workgroupName = (WorkgroupNameIdDTO) workgroupId; groupId = new GroupNameId(workgroupName.getWorkgroupName()); } else if (workgroupId instanceof WorkflowGroupIdDTO) { WorkflowGroupIdDTO workflowGroupId = (WorkflowGroupIdDTO) workgroupId; groupId = new WorkflowGroupId(workflowGroupId.getWorkgroupId()); } return groupId; } public static UserIdDTO convertUserId(UserId userId) { UserIdDTO userIdVO = null; if (userId instanceof AuthenticationUserId) { AuthenticationUserId id = (AuthenticationUserId) userId; userIdVO = new NetworkIdDTO(id.getAuthenticationId()); } else if (userId instanceof EmplId) { EmplId id = (EmplId) userId; userIdVO = new EmplIdDTO(id.getEmplId()); } else if (userId instanceof UuId) { UuId id = (UuId) userId; userIdVO = new UuIdDTO(id.getUuId()); } else if (userId instanceof WorkflowUserId) { WorkflowUserId id = (WorkflowUserId) userId; userIdVO = new WorkflowIdDTO(id.getWorkflowId()); } return userIdVO; } public static UserId 
convertUserIdVO(UserIdDTO userIdVO) { UserId userId = null; if (userIdVO instanceof NetworkIdDTO) { NetworkIdDTO id = (NetworkIdDTO) userIdVO; userId = new AuthenticationUserId(id.getNetworkId()); if (userId.isEmpty()) { throw new RuntimeException("Attempting to use empty NetworkId"); } } else if (userIdVO instanceof EmplIdDTO) { EmplIdDTO id = (EmplIdDTO) userIdVO; userId = new EmplId(id.getEmplId()); if (userId.isEmpty()) { throw new RuntimeException("Attempting to use empty EmplId"); } } else if (userIdVO instanceof UuIdDTO) { UuIdDTO id = (UuIdDTO) userIdVO; userId = new UuId(id.getUuId()); if (userId.isEmpty()) { throw new RuntimeException("Attempting to use empty UuId"); } } else if (userIdVO instanceof WorkflowIdDTO) { WorkflowIdDTO id = (WorkflowIdDTO) userIdVO; userId = new WorkflowUserId(id.getWorkflowId()); if (userId.isEmpty()) { throw new RuntimeException("Attempting to use empty WorkflowId"); } } return userId; } public static ResponsiblePartyDTO convertResponsibleParty(ResponsibleParty responsibleParty) { if (responsibleParty == null) { return null; } ResponsiblePartyDTO responsiblePartyVO = new ResponsiblePartyDTO(); responsiblePartyVO.setWorkgroupId(DTOConverter.convertGroupId(responsibleParty.getGroupId())); responsiblePartyVO.setUserId(DTOConverter.convertUserId(responsibleParty.getUserId())); responsiblePartyVO.setRoleName(responsibleParty.getRoleName()); return responsiblePartyVO; } public static ResponsibleParty convertResponsiblePartyVO(ResponsiblePartyDTO responsiblePartyVO) { if (responsiblePartyVO == null) { return null; } ResponsibleParty responsibleParty = new ResponsibleParty(); responsibleParty.setGroupId(DTOConverter.convertWorkgroupIdVO(responsiblePartyVO.getWorkgroupId())); responsibleParty.setUserId(DTOConverter.convertUserIdVO(responsiblePartyVO.getUserId())); responsibleParty.setRoleName(responsiblePartyVO.getRoleName()); return responsibleParty; } /** * refactor name to convertResponsiblePartyVO when ResponsibleParty object is gone * * @param responsiblePartyVO * @return * @throws KEWUserNotFoundException */ public static Recipient convertResponsiblePartyVOtoRecipient(ResponsiblePartyDTO responsiblePartyVO) throws KEWUserNotFoundException { if (responsiblePartyVO == null) { return null; } if (responsiblePartyVO.getRoleName() != null) { return new RoleRecipient(responsiblePartyVO.getRoleName()); } GroupId groupId = convertWorkgroupIdVO(responsiblePartyVO.getWorkgroupId()); if (groupId != null) { return KEWServiceLocator.getWorkgroupService().getWorkgroup(groupId); } UserId userId = convertUserIdVO(responsiblePartyVO.getUserId()); if (userId != null) { return KEWServiceLocator.getUserService().getWorkflowUser(userId); } throw new WorkflowRuntimeException("ResponsibleParty of unknown type"); } /** * Converts an ActionRequestVO to an ActionRequest. The ActionRequestVO passed in must be the root action request in the * graph, otherwise an IllegalArgumentException is thrown. This is to avoid potentially sticky issues with circular * references in the conversion. NOTE: This method's primary purpose is to convert ActionRequestVOs returned from a * RouteModule. Incidentally, the VO's returned from the route module will be lacking some information (like the node * instance) so no attempts are made to convert this data since further initialization is handled by a higher level * component (namely ActionRequestService.initializeActionRequestGraph). 
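 * <p>
 * Usage sketch (illustrative; rootRequestVO is a placeholder for whatever the RouteModule built):
 * <pre>
 * ActionRequestDTO rootRequestVO = ...; // must have no parent request or parent request id set
 * ActionRequestValue rootRequest = DTOConverter.convertActionRequestVO(rootRequestVO);
 * // child VOs attached via getChildrenRequests() are converted recursively;
 * // passing a non-root VO results in an IllegalArgumentException
 * </pre>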
*/ public static ActionRequestValue convertActionRequestVO(ActionRequestDTO actionRequestVO) throws KEWUserNotFoundException { if (actionRequestVO == null) { return null; } if (actionRequestVO.getParentActionRequest() != null || actionRequestVO.getParentActionRequestId() != null) { throw new IllegalArgumentException("Cannot convert a non-root ActionRequestVO"); } ActionRequestValue actionRequest = new ActionRequestFactory().createBlankActionRequest(); populateActionRequest(actionRequest, actionRequestVO); if (actionRequestVO.getChildrenRequests() != null) { for (int i = 0; i < actionRequestVO.getChildrenRequests().length; i++) { ActionRequestDTO childVO = actionRequestVO.getChildrenRequests()[i]; actionRequest.getChildrenRequests().add(convertActionRequestVO(childVO, actionRequest)); } } return actionRequest; } public static ActionRequestValue convertActionRequestVO(ActionRequestDTO actionRequestVO, ActionRequestValue parentActionRequest) throws KEWUserNotFoundException { if (actionRequestVO == null) { return null; } ActionRequestValue actionRequest = new ActionRequestFactory().createBlankActionRequest(); populateActionRequest(actionRequest, actionRequestVO); actionRequest.setParentActionRequest(parentActionRequest); actionRequest.setParentActionRequestId(parentActionRequest.getActionRequestId()); if (actionRequestVO.getChildrenRequests() != null) { for (int i = 0; i < actionRequestVO.getChildrenRequests().length; i++) { ActionRequestDTO childVO = actionRequestVO.getChildrenRequests()[i]; actionRequest.getChildrenRequests().add(convertActionRequestVO(childVO, actionRequest)); } } return actionRequest; } /** * This method converts everything except for the parent and child requests */ private static void populateActionRequest(ActionRequestValue actionRequest, ActionRequestDTO actionRequestVO) throws KEWUserNotFoundException { actionRequest.setActionRequested(actionRequestVO.getActionRequested()); actionRequest.setActionRequestId(actionRequestVO.getActionRequestId()); actionRequest.setActionTakenId(actionRequestVO.getActionTakenId()); actionRequest.setAnnotation(actionRequestVO.getAnnotation()); actionRequest.setApprovePolicy(actionRequestVO.getApprovePolicy()); actionRequest.setCreateDate(new Timestamp(new Date().getTime())); actionRequest.setCurrentIndicator(actionRequestVO.getCurrentIndicator()); actionRequest.setDelegationType(actionRequestVO.getDelegationType()); actionRequest.setDocVersion(actionRequestVO.getDocVersion()); actionRequest.setIgnorePrevAction(actionRequestVO.getIgnorePrevAction()); actionRequest.setPriority(actionRequestVO.getPriority()); actionRequest.setQualifiedRoleName(actionRequestVO.getQualifiedRoleName()); actionRequest.setQualifiedRoleNameLabel(actionRequestVO.getQualifiedRoleNameLabel()); actionRequest.setRecipientTypeCd(actionRequestVO.getRecipientTypeCd()); actionRequest.setResponsibilityDesc(actionRequestVO.getResponsibilityDesc()); actionRequest.setResponsibilityId(actionRequestVO.getResponsibilityId()); actionRequest.setRoleName(actionRequestVO.getRoleName()); Long routeHeaderId = actionRequestVO.getRouteHeaderId(); if (routeHeaderId != null) { actionRequest.setRouteHeaderId(routeHeaderId); actionRequest.setRouteHeader(KEWServiceLocator.getRouteHeaderService().getRouteHeader(routeHeaderId)); } // properties set in routemanagerservice actionRequest.setRouteLevel(actionRequestVO.getRouteLevel()); // TODO add the node instance to the VO // actionRequest.setRouteMethodName(actionRequestVO.getRouteMethodName()); 
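// Descriptive note: the recipient resolution below tries, in order, the explicit user id VO, the legacy
// emplId field, and the embedded UserDTO, and independently accepts a workgroup id or WorkgroupDTO
// (validated against the workgroup service); if neither a user nor a workgroup is resolved, the
// conversion fails with a RuntimeException.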
actionRequest.setStatus(actionRequestVO.getStatus()); // TODO this should be moved to a validate somewhere's... boolean userSet = false; if (actionRequestVO.getUserIdVO() != null) { UserId userId = convertUserIdVO(actionRequestVO.getUserIdVO()); WorkflowUser user = KEWServiceLocator.getUserService().getWorkflowUser(userId); actionRequest.setWorkflowId(user.getWorkflowId()); userSet = true; } else if (actionRequestVO.getEmplyId() != null) { WorkflowUser user = KEWServiceLocator.getUserService().getWorkflowUser(new EmplId(actionRequestVO.getEmplyId())); actionRequest.setWorkflowId(user.getWorkflowId()); userSet = true; } else if (actionRequestVO.getUserDTO() != null) { WorkflowUser user = convertUserVO(actionRequestVO.getUserDTO()); actionRequest.setWorkflowId(user.getWorkflowId()); userSet = true; } if (actionRequestVO.getWorkgroupId() != null) { Long workgroupId = actionRequestVO.getWorkgroupId(); // validate that the workgroup is good. Workgroup workgroup = KEWServiceLocator.getWorkgroupService().getWorkgroup(new WorkflowGroupId(workgroupId)); if (workgroup == null) { throw new RuntimeException("Workgroup Id " + workgroupId + " is invalid. Action Request cannot be activated."); } actionRequest.setWorkgroupId(workgroupId); userSet = true; } else if (actionRequestVO.getWorkgroupDTO() != null) { Long workgroupId = actionRequestVO.getWorkgroupDTO().getWorkgroupId(); // validate that the workgroup is good. Workgroup workgroup = KEWServiceLocator.getWorkgroupService().getWorkgroup(new WorkflowGroupId(workgroupId)); if (workgroup == null) { throw new RuntimeException("Workgroup Id " + workgroupId + " is invalid. Action Request cannot be activated."); } actionRequest.setWorkgroupId(workgroupId); userSet = true; } // TODO role requests will not have a user or workgroup, so this code needs to handle that case if (!userSet) { throw new RuntimeException("Post processor didn't set a user or workgroup on the request"); } } public static ActionTakenValue convertActionTakenVO(ActionTakenDTO actionTakenVO) throws KEWUserNotFoundException { if (actionTakenVO == null) { return null; } ActionTakenValue actionTaken = new ActionTakenValue(); actionTaken.setActionDate(new Timestamp(actionTakenVO.getActionDate().getTimeInMillis())); actionTaken.setActionTaken(actionTakenVO.getActionTaken()); actionTaken.setActionTakenId(actionTakenVO.getActionTakenId()); actionTaken.setAnnotation(actionTakenVO.getAnnotation()); actionTaken.setCurrentIndicator(Boolean.TRUE); WorkflowUser delegator = convertUserVO(actionTakenVO.getDelegatorDTO()); actionTaken.setDelegator(delegator); if (delegator != null) { actionTaken.setDelegatorWorkflowId(delegator.getWorkflowUserId().getWorkflowId()); } actionTaken.setDocVersion(actionTakenVO.getDocVersion()); DocumentRouteHeaderValue routeHeader = KEWServiceLocator.getRouteHeaderService().getRouteHeader(actionTakenVO.getRouteHeaderId()); actionTaken.setRouteHeader(routeHeader); actionTaken.setRouteHeaderId(actionTaken.getRouteHeaderId()); WorkflowUser user = convertUserVO(actionTakenVO.getUserDTO()); actionTaken.setWorkflowId(user.getWorkflowUserId().getWorkflowId()); return actionTaken; } public static DocumentRouteStatusChangeDTO convertDocumentRouteStatusChange(DocumentRouteStatusChange statusChange) { if (statusChange == null) { return null; } DocumentRouteStatusChangeDTO statusChangeVO = new DocumentRouteStatusChangeDTO(); statusChangeVO.setRouteHeaderId(statusChange.getRouteHeaderId()); statusChangeVO.setAppDocId(statusChange.getAppDocId()); 
statusChangeVO.setOldRouteStatus(statusChange.getOldRouteStatus()); statusChangeVO.setNewRouteStatus(statusChange.getNewRouteStatus()); return statusChangeVO; } public static DocumentRouteLevelChangeDTO convertDocumentRouteLevelChange(DocumentRouteLevelChange routeLevelChange) { if (routeLevelChange == null) { return null; } DocumentRouteLevelChangeDTO routeLevelChangeVO = new DocumentRouteLevelChangeDTO(); routeLevelChangeVO.setRouteHeaderId(routeLevelChange.getRouteHeaderId()); routeLevelChangeVO.setAppDocId(routeLevelChange.getAppDocId()); routeLevelChangeVO.setOldRouteLevel(routeLevelChange.getOldRouteLevel()); routeLevelChangeVO.setNewRouteLevel(routeLevelChange.getNewRouteLevel()); routeLevelChangeVO.setOldNodeName(routeLevelChange.getOldNodeName()); routeLevelChangeVO.setNewNodeName(routeLevelChange.getNewNodeName()); routeLevelChangeVO.setOldNodeInstanceId(routeLevelChange.getOldNodeInstanceId()); routeLevelChangeVO.setNewNodeInstanceId(routeLevelChange.getNewNodeInstanceId()); return routeLevelChangeVO; } public static DeleteEventDTO convertDeleteEvent(DeleteEvent deleteEvent) { if (deleteEvent == null) { return null; } DeleteEventDTO deleteEventVO = new DeleteEventDTO(); deleteEventVO.setRouteHeaderId(deleteEvent.getRouteHeaderId()); deleteEventVO.setAppDocId(deleteEvent.getAppDocId()); return deleteEventVO; } public static ActionTakenEventDTO convertActionTakenEvent(ActionTakenEvent actionTakenEvent) throws KEWUserNotFoundException { if (actionTakenEvent == null) { return null; } ActionTakenEventDTO actionTakenEventVO = new ActionTakenEventDTO(); actionTakenEventVO.setRouteHeaderId(actionTakenEvent.getRouteHeaderId()); actionTakenEventVO.setAppDocId(actionTakenEvent.getAppDocId()); actionTakenEventVO.setActionTaken(convertActionTaken(actionTakenEvent.getActionTaken())); return actionTakenEventVO; } public static BeforeProcessEventDTO convertBeforeProcessEvent(BeforeProcessEvent event) throws KEWUserNotFoundException { if (event == null) { return null; } BeforeProcessEventDTO beforeProcessEvent = new BeforeProcessEventDTO(); beforeProcessEvent.setRouteHeaderId(event.getRouteHeaderId()); beforeProcessEvent.setAppDocId(event.getAppDocId()); beforeProcessEvent.setNodeInstanceId(event.getNodeInstanceId()); return beforeProcessEvent; } public static AfterProcessEventDTO convertAfterProcessEvent(AfterProcessEvent event) throws KEWUserNotFoundException { if (event == null) { return null; } AfterProcessEventDTO afterProcessEvent = new AfterProcessEventDTO(); afterProcessEvent.setRouteHeaderId(event.getRouteHeaderId()); afterProcessEvent.setAppDocId(event.getAppDocId()); afterProcessEvent.setNodeInstanceId(event.getNodeInstanceId()); afterProcessEvent.setSuccessfullyProcessed(event.isSuccessfullyProcessed()); return afterProcessEvent; } public static AttributeDefinition convertWorkflowAttributeDefinitionVO(WorkflowAttributeDefinitionDTO definitionVO, org.kuali.rice.kew.doctype.bo.DocumentType documentType) { if (definitionVO == null) { return null; } // get the rule attribute so we can get's it's message antity and not blow up if it's remote RuleAttribute ruleAttribute = KEWServiceLocator.getRuleAttributeService().findByClassName(definitionVO.getAttributeName()); if (ruleAttribute == null) { ruleAttribute = KEWServiceLocator.getRuleAttributeService().findByName(definitionVO.getAttributeName()); } if (ruleAttribute == null) { throw new WorkflowRuntimeException("Attribute " + definitionVO.getAttributeName() + " not found"); } ObjectDefinition definition = new 
ObjectDefinition(ruleAttribute.getClassName()); for (int index = 0; index < definitionVO.getConstructorParameters().length; index++) { String parameter = definitionVO.getConstructorParameters()[index]; definition.addConstructorParameter(new DataDefinition(parameter, String.class)); } boolean propertiesAsMap = KEWConstants.RULE_XML_ATTRIBUTE_TYPE.equals(ruleAttribute.getType()) || KEWConstants.SEARCHABLE_XML_ATTRIBUTE_TYPE.equals(ruleAttribute.getType()); if (!propertiesAsMap) { for (int index = 0; index < definitionVO.getProperties().length; index++) { PropertyDefinitionDTO propertyDefVO = definitionVO.getProperties()[index]; definition.addProperty(new PropertyDefinition(propertyDefVO.getName(), new DataDefinition(propertyDefVO.getValue(), String.class))); } } // this is likely from an EDL validate call and ME may needed to be added to the AttDefinitionVO. if (ruleAttribute.getMessageEntity() != null) { definition.setMessageEntity(ruleAttribute.getMessageEntity()); } else { // get the me from the document type if it's been passed in - the document is having action taken on it. if (documentType != null) { definition.setMessageEntity(documentType.getMessageEntity()); } } return new AttributeDefinition(ruleAttribute, definition); } public static DocumentDetailDTO convertDocumentDetail(DocumentRouteHeaderValue routeHeader) throws WorkflowException { if (routeHeader == null) { return null; } DocumentDetailDTO detail = new DocumentDetailDTO(); populateRouteHeaderVO(detail, routeHeader); Map nodeInstances = new HashMap(); List actionRequestVOs = new ArrayList(); List rootActionRequests = KEWServiceLocator.getActionRequestService().getRootRequests(routeHeader.getActionRequests()); for (Iterator iterator = rootActionRequests.iterator(); iterator.hasNext();) { ActionRequestValue actionRequest = (ActionRequestValue) iterator.next(); actionRequestVOs.add(convertActionRequest(actionRequest)); RouteNodeInstance nodeInstance = actionRequest.getNodeInstance(); if (nodeInstance == null) { continue; } if (nodeInstance.getRouteNodeInstanceId() == null) { throw new WorkflowException("Error creating document detail structure because of NULL node instance id."); } nodeInstances.put(nodeInstance.getRouteNodeInstanceId(), nodeInstance); } detail.setActionRequests((ActionRequestDTO[]) actionRequestVOs.toArray(new ActionRequestDTO[0])); List nodeInstanceVOs = new ArrayList(); for (Iterator iterator = nodeInstances.values().iterator(); iterator.hasNext();) { RouteNodeInstance nodeInstance = (RouteNodeInstance) iterator.next(); nodeInstanceVOs.add(convertRouteNodeInstance(nodeInstance)); } detail.setNodeInstances((RouteNodeInstanceDTO[]) nodeInstanceVOs.toArray(new RouteNodeInstanceDTO[0])); List actionTakenVOs = new ArrayList(); for (Iterator iterator = routeHeader.getActionsTaken().iterator(); iterator.hasNext();) { ActionTakenValue actionTaken = (ActionTakenValue) iterator.next(); actionTakenVOs.add(convertActionTaken(actionTaken)); } detail.setActionsTaken((ActionTakenDTO[]) actionTakenVOs.toArray(new ActionTakenDTO[0])); return detail; } public static RouteNodeInstanceDTO convertRouteNodeInstance(RouteNodeInstance nodeInstance) throws WorkflowException { if (nodeInstance == null) { return null; } RouteNodeInstanceDTO nodeInstanceVO = new RouteNodeInstanceDTO(); nodeInstanceVO.setActive(nodeInstance.isActive()); nodeInstanceVO.setBranchId(nodeInstance.getBranch().getBranchId()); nodeInstanceVO.setComplete(nodeInstance.isComplete()); nodeInstanceVO.setDocumentId(nodeInstance.getDocumentId()); 
nodeInstanceVO.setInitial(nodeInstance.isInitial()); nodeInstanceVO.setName(nodeInstance.getName()); nodeInstanceVO.setProcessId(nodeInstance.getProcess() != null ? nodeInstance.getProcess().getRouteNodeInstanceId() : null); nodeInstanceVO.setRouteNodeId(nodeInstance.getRouteNode().getRouteNodeId()); nodeInstanceVO.setRouteNodeInstanceId(nodeInstance.getRouteNodeInstanceId()); nodeInstanceVO.setState(convertStates(nodeInstance.getState())); nodeInstanceVO.setNextNodes(new RouteNodeInstanceDTO[nodeInstance.getNextNodeInstances().size()]); int i = 0; for (Iterator iter = nodeInstance.getNextNodeInstances().iterator(); iter.hasNext(); i++) { RouteNodeInstance nextNodeInstance = (RouteNodeInstance) iter.next(); nodeInstanceVO.getNextNodes()[i] = convertRouteNodeInstance(nextNodeInstance); } return nodeInstanceVO; } public static StateDTO[] convertStates(Collection states) { if (states == null) { return null; } StateDTO[] stateVOs = new StateDTO[states.size()]; int index = 0; for (Iterator iterator = states.iterator(); iterator.hasNext();) { State state = (State) iterator.next(); stateVOs[index++] = convertState(state); } return stateVOs; } public static StateDTO convertState(State nodeState) { if (nodeState == null) { return null; } StateDTO stateVO = new StateDTO(); stateVO.setStateId(nodeState.getStateId()); stateVO.setKey(nodeState.getKey()); stateVO.setValue(nodeState.getValue()); return stateVO; } public static RouteNodeDTO convertRouteNode(RouteNode node) { if (node == null) { return null; } RouteNodeDTO nodeVO = new RouteNodeDTO(); nodeVO.setActivationType(node.getActivationType()); nodeVO.setBranchName(node.getBranch() != null ? node.getBranch().getName() : null); nodeVO.setDocumentTypeId(node.getDocumentTypeId()); try { nodeVO.setExceptionWorkgroup(convertWorkgroup(node.getExceptionWorkgroup())); } catch (KEWUserNotFoundException e) { throw new WorkflowRuntimeException("Could not locate users in exception workgroup for node " + node.getRouteNodeId() + ".", e); } nodeVO.setFinalApprovalInd(node.getFinalApprovalInd().booleanValue()); nodeVO.setMandatoryRouteInd(node.getMandatoryRouteInd().booleanValue()); nodeVO.setNodeType(node.getNodeType()); nodeVO.setRouteMethodCode(node.getRouteMethodCode()); nodeVO.setRouteMethodName(node.getRouteMethodName()); nodeVO.setRouteNodeId(node.getRouteNodeId()); nodeVO.setRouteNodeName(node.getRouteNodeName()); int index = 0; Long[] previousNodeIds = new Long[node.getPreviousNodes().size()]; for (Iterator iterator = node.getPreviousNodes().iterator(); iterator.hasNext();) { RouteNode prevNode = (RouteNode) iterator.next(); previousNodeIds[index++] = prevNode.getRouteNodeId(); } nodeVO.setPreviousNodeIds(previousNodeIds); index = 0; Long[] nextNodeIds = new Long[node.getNextNodes().size()]; for (Iterator iterator = node.getNextNodes().iterator(); iterator.hasNext();) { RouteNode nextNode = (RouteNode) iterator.next(); nextNodeIds[index++] = nextNode.getRouteNodeId(); } nodeVO.setNextNodeIds(nextNodeIds); return nodeVO; } public static ProcessDTO convertProcess(Process process) { ProcessDTO processVO = new ProcessDTO(); processVO.setInitial(process.isInitial()); processVO.setInitialRouteNode(convertRouteNode(process.getInitialRouteNode())); processVO.setName(process.getName()); processVO.setProcessId(process.getProcessId()); return processVO; } public static MovePoint convertMovePointVO(MovePointDTO movePointVO) { MovePoint movePoint = new MovePoint(); movePoint.setStartNodeName(movePointVO.getStartNodeName()); 
movePoint.setStepsToMove(movePointVO.getStepsToMove()); return movePoint; } public static AdHocRevoke convertAdHocRevokeVO(AdHocRevokeDTO revokeVO) throws WorkflowException { AdHocRevoke revoke = new AdHocRevoke(); revoke.setActionRequestId(revokeVO.getActionRequestId()); revoke.setNodeName(revokeVO.getNodeName()); if (revokeVO.getUserId() != null) { revoke.setUser(KEWServiceLocator.getUserService().getWorkflowUser(revokeVO.getUserId())); } if (revokeVO.getWorkgroupId() != null) { revoke.setWorkgroup(KEWServiceLocator.getWorkgroupService().getWorkgroup(revokeVO.getWorkgroupId())); } return revoke; } public static WorkflowAttributeValidationErrorDTO convertWorkflowAttributeValidationError(WorkflowAttributeValidationError error) { return new WorkflowAttributeValidationErrorDTO(error.getKey(), error.getMessage()); } // Method added for updating notes on server sites based on NoteVO change. Modfy on April 7, 2006 public static void updateNotes(RouteHeaderDTO routeHeaderVO, Long routeHeaderId) { NoteDTO[] notes = routeHeaderVO.getNotes(); NoteDTO[] notesToDelete = routeHeaderVO.getNotesToDelete(); Note noteToDelete = null; Note noteToSave = null; // Add or update notes to note table based on notes array in RouteHeaderVO if (notes != null) { for (int i = 0; i < notes.length; i++) { if (notes[i] != null) { noteToSave = new Note(); noteToSave.setNoteId(notes[i].getNoteId()); noteToSave.setRouteHeaderId(routeHeaderId); noteToSave.setNoteAuthorWorkflowId(notes[i].getNoteAuthorWorkflowId()); noteToSave.setNoteCreateDate(Utilities.convertCalendar(notes[i].getNoteCreateDate())); noteToSave.setNoteText(notes[i].getNoteText()); noteToSave.setLockVerNbr(notes[i].getLockVerNbr()); // if notes[i].getNoteId() == null, add note to note table, otherwise update note to note table getNoteService().saveNote(noteToSave); } } } // Delete notes from note table based on notesToDelete array in RouteHeaderVO if (notesToDelete != null) { for (int i = 0; i < notesToDelete.length; i++) { noteToDelete = getNoteService().getNoteByNoteId(notesToDelete[i].getNoteId()); if (noteToDelete != null) { getNoteService().deleteNote(noteToDelete); } } routeHeaderVO.setNotesToDelete(null); } } private static NoteService getNoteService() { return (NoteService) KEWServiceLocator.getService(KEWServiceLocator.NOTE_SERVICE); } private static NoteDTO[] convertNotesArrayListToNoteVOArray(List notesArrayList) { if (notesArrayList.size() > 0) { NoteDTO[] noteVOArray = new NoteDTO[notesArrayList.size()]; int i = 0; Note tempNote; NoteDTO tempNoteVO; for (Iterator it = notesArrayList.iterator(); it.hasNext();) { tempNote = (Note) it.next(); tempNoteVO = new NoteDTO(); tempNoteVO.setNoteId(tempNote.getNoteId()); tempNoteVO.setRouteHeaderId(tempNote.getRouteHeaderId()); tempNoteVO.setNoteAuthorWorkflowId(tempNote.getNoteAuthorWorkflowId()); tempNoteVO.setNoteCreateDate(Utilities.convertTimestamp(tempNote.getNoteCreateDate())); tempNoteVO.setNoteText(tempNote.getNoteText()); tempNoteVO.setLockVerNbr(tempNote.getLockVerNbr()); noteVOArray[i] = tempNoteVO; i++; } return noteVOArray; } else { return null; } } public static SimulationCriteria convertReportCriteriaDTO(ReportCriteriaDTO criteriaVO) throws KEWUserNotFoundException { if (criteriaVO == null) { return null; } SimulationCriteria criteria = new SimulationCriteria(); criteria.setDestinationNodeName(criteriaVO.getTargetNodeName()); criteria.setDocumentId(criteriaVO.getRouteHeaderId()); criteria.setDocumentTypeName(criteriaVO.getDocumentTypeName()); 
criteria.setXmlContent(criteriaVO.getXmlContent()); criteria.setActivateRequests(criteriaVO.getActivateRequests()); if (criteriaVO.getRoutingUser() != null) { WorkflowUser user = KEWServiceLocator.getUserService().getWorkflowUser(criteriaVO.getRoutingUser()); if (user == null) { throw new KEWUserNotFoundException("Could not locate user for the given id: " + criteriaVO.getRoutingUser()); } criteria.setRoutingUser(user); } if (criteriaVO.getRuleTemplateNames() != null) { for (int index = 0; index < criteriaVO.getRuleTemplateNames().length; index++) { String ruleTemplateName = criteriaVO.getRuleTemplateNames()[index]; criteria.getRuleTemplateNames().add(ruleTemplateName); } } if (criteriaVO.getNodeNames() != null) { for (int i = 0; i < criteriaVO.getNodeNames().length; i++) { String nodeName = criteriaVO.getNodeNames()[i]; criteria.getNodeNames().add(nodeName); } } if (criteriaVO.getTargetUsers() != null) { for (int index = 0; index < criteriaVO.getTargetUsers().length; index++) { UserIdDTO userIdVO = criteriaVO.getTargetUsers()[index]; WorkflowUser user = KEWServiceLocator.getUserService().getWorkflowUser(userIdVO); if (user == null) { throw new KEWUserNotFoundException("Could not locate user for the given id: " + userIdVO); } criteria.getDestinationRecipients().add(user); } } if (criteriaVO.getActionsToTake() != null) { for (int index = 0; index < criteriaVO.getActionsToTake().length; index++) { ReportActionToTakeDTO actionToTakeVO = criteriaVO.getActionsToTake()[index]; criteria.getActionsToTake().add(convertReportActionToTakeVO(actionToTakeVO)); } } return criteria; } public static SimulationActionToTake convertReportActionToTakeVO(ReportActionToTakeDTO actionToTakeVO) throws KEWUserNotFoundException { if (actionToTakeVO == null) { return null; } SimulationActionToTake actionToTake = new SimulationActionToTake(); actionToTake.setNodeName(actionToTakeVO.getNodeName()); if (StringUtils.isBlank(actionToTakeVO.getActionToPerform())) { throw new IllegalArgumentException("ReportActionToTakeVO must contain an action taken code and does not"); } actionToTake.setActionToPerform(actionToTakeVO.getActionToPerform()); if (actionToTakeVO.getUserIdVO() == null) { throw new IllegalArgumentException("ReportActionToTakeVO must contain a userId and does not"); } WorkflowUser user = KEWServiceLocator.getUserService().getWorkflowUser(actionToTakeVO.getUserIdVO()); if (user == null) { throw new KEWUserNotFoundException("Could not locate user for the given id: " + actionToTakeVO.getUserIdVO()); } actionToTake.setUser(user); return actionToTake; } public static RuleDelegationDTO convertRuleDelegation(RuleDelegation ruleDelegation) throws WorkflowException { if (ruleDelegation == null) { return null; } RuleDelegationDTO ruleDelegationVO = new RuleDelegationDTO(); ruleDelegationVO.setDelegationType(ruleDelegation.getDelegationType()); ruleDelegationVO.setDelegationRule(convertRule(ruleDelegation.getDelegationRuleBaseValues())); return ruleDelegationVO; } // public static RuleDelegation convertRuleExtensionVO(RuleExtensionVO ruleExtensionVO) throws WorkflowException {} public static Collection<RuleExtensionDTO> convertRuleExtension(RuleExtension ruleExtension) throws WorkflowException { if (ruleExtension == null) { return null; } List<RuleExtensionDTO> extensionVOs = new ArrayList<RuleExtensionDTO>(); for (Iterator iter = ruleExtension.getExtensionValues().iterator(); iter.hasNext();) { RuleExtensionValue extensionValue = (RuleExtensionValue) iter.next(); extensionVOs.add(new 
RuleExtensionDTO(extensionValue.getKey(), extensionValue.getValue())); } return extensionVOs; } public static KeyValuePair convertRuleExtensionVO(RuleExtensionDTO ruleExtensionVO) throws WorkflowException { if (ruleExtensionVO == null) { return null; } return new KeyValuePair(ruleExtensionVO.getKey(), ruleExtensionVO.getValue()); } public static RuleResponsibilityDTO convertRuleResponsibility(RuleResponsibility ruleResponsibility) throws WorkflowException { if (ruleResponsibility == null) { return null; } RuleResponsibilityDTO ruleResponsibilityVO = new RuleResponsibilityDTO(); ruleResponsibilityVO.setActionRequestedCd(ruleResponsibility.getActionRequestedCd()); ruleResponsibilityVO.setApprovePolicy(ruleResponsibility.getApprovePolicy()); ruleResponsibilityVO.setPriority(ruleResponsibility.getPriority()); ruleResponsibilityVO.setResponsibilityId(ruleResponsibility.getResponsibilityId()); ruleResponsibilityVO.setRoleName(ruleResponsibility.getRole()); ruleResponsibilityVO.setUser(convertUser(ruleResponsibility.getWorkflowUser())); ruleResponsibilityVO.setWorkgroup(convertWorkgroup(ruleResponsibility.getWorkgroup())); for (Iterator iter = ruleResponsibility.getDelegationRules().iterator(); iter.hasNext();) { RuleDelegation ruleDelegation = (RuleDelegation) iter.next(); ruleResponsibilityVO.addDelegationRule(convertRuleDelegation(ruleDelegation)); } return ruleResponsibilityVO; } // public static KeyValuePair convertRuleResponsibilityVO(RuleResponsibilityVO ruleResponsibilityVO) throws // WorkflowException {} public static RuleDTO convertRule(RuleBaseValues ruleValues) throws WorkflowException { if (ruleValues == null) { return null; } RuleDTO rule = new RuleDTO(); rule.setActiveInd(ruleValues.getActiveInd()); rule.setDescription(ruleValues.getDescription()); rule.setDocTypeName(ruleValues.getDocTypeName()); rule.setFromDate(ruleValues.getFromDateString()); rule.setToDate(ruleValues.getToDateString()); rule.setIgnorePrevious(ruleValues.getIgnorePrevious()); rule.setRuleTemplateId(ruleValues.getRuleTemplateId()); rule.setRuleTemplateName(ruleValues.getRuleTemplateName()); // get keyPair values to setup RuleExtensionVOs for (Iterator iter = ruleValues.getRuleExtensions().iterator(); iter.hasNext();) { RuleExtension ruleExtension = (RuleExtension) iter.next(); rule.addRuleExtensions(convertRuleExtension(ruleExtension)); } // get keyPair values to setup RuleExtensionVOs for (Iterator iter = ruleValues.getResponsibilities().iterator(); iter.hasNext();) { RuleResponsibility ruleResponsibility = (RuleResponsibility) iter.next(); rule.addRuleResponsibility(convertRuleResponsibility(ruleResponsibility)); } return rule; } public static DocSearchCriteriaDTO convertDocumentSearchCriteriaDTO(DocumentSearchCriteriaDTO criteriaVO) throws WorkflowException { DocSearchCriteriaDTO criteria = new DocSearchCriteriaDTO(); criteria.setAppDocId(criteriaVO.getAppDocId()); criteria.setApprover(criteriaVO.getApprover()); criteria.setDocRouteStatus(criteriaVO.getDocRouteStatus()); criteria.setDocTitle(criteriaVO.getDocTitle()); criteria.setDocTypeFullName(criteriaVO.getDocTypeFullName()); criteria.setDocVersion(criteriaVO.getDocVersion()); criteria.setFromDateApproved(criteriaVO.getFromDateApproved()); criteria.setFromDateCreated(criteriaVO.getFromDateCreated()); criteria.setFromDateFinalized(criteriaVO.getFromDateFinalized()); criteria.setFromDateLastModified(criteriaVO.getFromDateLastModified()); criteria.setInitiator(criteriaVO.getInitiator()); criteria.setIsAdvancedSearch((criteriaVO.isAdvancedSearch()) ? 
DocSearchCriteriaDTO.ADVANCED_SEARCH_INDICATOR_STRING : "NO"); criteria.setSuperUserSearch((criteriaVO.isSuperUserSearch()) ? DocSearchCriteriaDTO.SUPER_USER_SEARCH_INDICATOR_STRING : "NO"); criteria.setRouteHeaderId(criteriaVO.getRouteHeaderId()); criteria.setViewer(criteriaVO.getViewer()); criteria.setWorkgroupViewerName(criteriaVO.getWorkgroupViewerName()); criteria.setToDateApproved(criteriaVO.getToDateApproved()); criteria.setToDateCreated(criteriaVO.getToDateCreated()); criteria.setToDateFinalized(criteriaVO.getToDateFinalized()); criteria.setToDateLastModified(criteriaVO.getToDateLastModified()); criteria.setThreshold(criteriaVO.getThreshold()); criteria.setSaveSearchForUser(criteriaVO.isSaveSearchForUser()); // generate the route node criteria if ( (StringUtils.isNotBlank(criteriaVO.getDocRouteNodeName())) && (StringUtils.isBlank(criteriaVO.getDocTypeFullName())) ) { throw new WorkflowException("No document type name specified when attempting to search by route node name '" + criteriaVO.getDocRouteNodeName() + "'"); } else if ( (StringUtils.isNotBlank(criteriaVO.getDocRouteNodeName())) && (StringUtils.isNotBlank(criteriaVO.getDocTypeFullName())) ) { criteria.setDocRouteNodeLogic(criteriaVO.getDocRouteNodeLogic()); List routeNodes = KEWServiceLocator.getRouteNodeService().getFlattenedNodes(getDocumentTypeByName(criteria.getDocTypeFullName()), true); boolean foundRouteNode = false; for (Iterator iterator = routeNodes.iterator(); iterator.hasNext();) { RouteNode routeNode = (RouteNode) iterator.next(); if (criteriaVO.getDocRouteNodeName().equals(routeNode.getRouteNodeName())) { foundRouteNode = true; break; } } if (!foundRouteNode) { throw new WorkflowException("Could not find route node name '" + criteriaVO.getDocRouteNodeName() + "' for document type name '" + criteriaVO.getDocTypeFullName() + "'"); } criteria.setDocRouteNodeId(criteriaVO.getDocRouteNodeName()); } // build a map of the search attributes passed in from the client creating lists where keys are duplicated HashMap<String, List<String>> searchAttributeValues = new HashMap<String,List<String>>(); for (KeyValueDTO keyValueVO : criteriaVO.getSearchAttributeValues()) { if (searchAttributeValues.containsKey(keyValueVO.getKey())) { searchAttributeValues.get(keyValueVO.getKey()).add(keyValueVO.getValue()); } else { searchAttributeValues.put(keyValueVO.getKey(), Arrays.asList(new String[]{keyValueVO.getValue()})); } } // build the list of SearchAttributeFormContainer objects List propertyFields = new ArrayList(); for (String key : searchAttributeValues.keySet()) { List<String> values = searchAttributeValues.get(key); SearchAttributeFormContainer container = null; if (values.size() == 1) { container = new SearchAttributeFormContainer(key, values.get(0)); } else if (values.size() > 1) { container = new SearchAttributeFormContainer(key, (String[])values.toArray()); } if (container != null) { propertyFields.add(container); } } DocSearchUtils.addSearchableAttributesToCriteria(criteria, propertyFields, true); return criteria; } private static DocumentType getDocumentTypeByName(String documentTypeName) { return KEWServiceLocator.getDocumentTypeService().findByName(documentTypeName); } public static DocumentSearchResultDTO convertDocumentSearchResultComponents(DocumentSearchResultComponents searchResult) throws WorkflowException { DocumentSearchResultDTO resultsVO = new DocumentSearchResultDTO(); resultsVO.setColumns(convertColumns(searchResult.getColumns())); 
resultsVO.setSearchResults(convertDocumentSearchResults(searchResult.getSearchResults())); return resultsVO; } private static List<DocumentSearchResultRowDTO> convertDocumentSearchResults(List<DocumentSearchResult> searchResults) throws WorkflowException { List<DocumentSearchResultRowDTO> rowVOs = new ArrayList<DocumentSearchResultRowDTO>(); for (DocumentSearchResult documentSearchResult : searchResults) { rowVOs.add(convertDocumentSearchResult(documentSearchResult)); } return rowVOs; } public static DocumentSearchResultRowDTO convertDocumentSearchResult(DocumentSearchResult resultRow) throws WorkflowException { DocumentSearchResultRowDTO rowVO = new DocumentSearchResultRowDTO(); List<KeyValueDTO> fieldValues = new ArrayList<KeyValueDTO>(); for (KeyValueSort keyValueSort : resultRow.getResultContainers()) { fieldValues.add(new KeyValueDTO(keyValueSort.getKey(),keyValueSort.getValue(),keyValueSort.getUserDisplayValue())); } rowVO.setFieldValues(fieldValues); return rowVO; } private static List<LookupableColumnDTO> convertColumns(List<Column> columns) throws WorkflowException { List<LookupableColumnDTO> columnVOs = new ArrayList<LookupableColumnDTO>(); for (Column column : columns) { columnVOs.add(convertColumn(column)); } return columnVOs; } public static LookupableColumnDTO convertColumn(Column column) throws WorkflowException { LookupableColumnDTO columnVO = new LookupableColumnDTO(); columnVO.setColumnTitle(column.getColumnTitle()); columnVO.setKey(column.getKey()); columnVO.setPropertyName(column.getPropertyName()); columnVO.setSortable(column.isSortable()); columnVO.setSortPropertyName(column.getSortPropertyName()); columnVO.setType(column.getType()); List<KeyValueDTO> displayParameters = new ArrayList<KeyValueDTO>(); for (String key : column.getDisplayParameters().keySet()) { displayParameters.add(new KeyValueDTO(key,column.getDisplayParameters().get(key))); } columnVO.setDisplayParameters(displayParameters); return null; } // public static RuleBaseValues convertRuleVO(RuleVO ruleVO) throws WorkflowException {} private static void handleException(String message, Exception e) throws WorkflowException { if (e instanceof RuntimeException) { throw (RuntimeException) e; } else if (e instanceof WorkflowException) { throw (WorkflowException) e; } throw new WorkflowException(message, e); } }
KULRICE-2395 Rename references to "messageEntity" to "serviceNamespace"
impl/src/main/java/org/kuali/rice/kew/dto/DTOConverter.java
KULRICE-2395 Rename references to "messageEntity" to "serviceNamespace"
<ide><path>impl/src/main/java/org/kuali/rice/kew/dto/DTOConverter.java <ide> } <ide> <ide> // this is likely from an EDL validate call and ME may needed to be added to the AttDefinitionVO. <del> if (ruleAttribute.getMessageEntity() != null) { <del> definition.setMessageEntity(ruleAttribute.getMessageEntity()); <add> if (ruleAttribute.getServiceNamespace() != null) { <add> definition.setServiceNamespace(ruleAttribute.getServiceNamespace()); <ide> } else { <ide> // get the me from the document type if it's been passed in - the document is having action taken on it. <del> definition.setMessageEntity(documentType.getMessageEntity()); <add> definition.setServiceNamespace(documentType.getServiceNamespace()); <ide> } <ide>
Java
epl-1.0
c01a0ad8890ef5ebdd5c10aacef2a99b9b53b1c5
0
gnodet/wikitext
/******************************************************************************* * Copyright (c) 2004, 2007 Mylyn project committers and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html *******************************************************************************/ package org.eclipse.mylyn.tasks.tests; import java.lang.reflect.InvocationTargetException; import java.util.Arrays; import junit.framework.TestCase; import org.eclipse.core.internal.resources.ProjectDescription; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IProjectDescription; import org.eclipse.core.resources.IWorkspaceRoot; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IAdaptable; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.mylyn.internal.tasks.ui.workingsets.TaskWorkingSetUpdater; import org.eclipse.mylyn.resources.tests.ResourceTestUtil; import org.eclipse.mylyn.tasks.core.TaskList; import org.eclipse.mylyn.tasks.tests.connector.MockRepositoryQuery; import org.eclipse.mylyn.tasks.ui.TasksUiPlugin; import org.eclipse.ui.IWorkingSet; import org.eclipse.ui.IWorkingSetManager; import org.eclipse.ui.actions.WorkspaceModifyOperation; import org.eclipse.ui.internal.Workbench; public class TaskWorkingSetTest extends TestCase { private IProject project; private IWorkspaceRoot root; private IWorkingSet workingSet; private IWorkingSetManager workingSetManager; @Override protected void setUp() throws Exception { workingSetManager = Workbench.getInstance().getWorkingSetManager(); root = ResourcesPlugin.getWorkspace().getRoot(); } @Override protected void tearDown() throws Exception { if (workingSet != null) { workingSetManager.removeWorkingSet(workingSet); } if (project != null) { ResourceTestUtil.deleteProject(project); } } public void testDeleteQuery() { MockRepositoryQuery query = new MockRepositoryQuery("description"); TaskList taskList = TasksUiPlugin.getTaskListManager().getTaskList(); taskList.addQuery(query); workingSet = createWorkingSet(query); assertTrue(Arrays.asList(workingSet.getElements()).contains(query)); TasksUiPlugin.getTaskListManager().getTaskList().deleteQuery(query); assertFalse(Arrays.asList(workingSet.getElements()).contains(query)); } public void testRenameQuery() { MockRepositoryQuery query = new MockRepositoryQuery("description"); TaskList taskList = TasksUiPlugin.getTaskListManager().getTaskList(); taskList.addQuery(query); workingSet = createWorkingSet(query); assertTrue(workingSet.getElements().length == 1); IAdaptable[] elements = workingSet.getElements(); assertTrue(elements.length == 1); assertTrue(elements[0] instanceof MockRepositoryQuery); assertTrue(((MockRepositoryQuery) elements[0]).getHandleIdentifier().equals("description")); assertTrue(Arrays.asList(workingSet.getElements()).contains(query)); query.setHandleIdentifier("Test"); assertTrue(workingSet.getElements().length == 1); elements = workingSet.getElements(); assertTrue(elements.length == 1); assertTrue(elements[0] instanceof MockRepositoryQuery); assertTrue(((MockRepositoryQuery) elements[0]).getHandleIdentifier().equals("Test")); assertTrue(Arrays.asList(workingSet.getElements()).contains(query)); } // XXX see bug 212962 // public void 
testRenameQuery() { // MockRepositoryQuery query = new MockRepositoryQuery("description"); // TaskList taskList = TasksUiPlugin.getTaskListManager().getTaskList(); // taskList.addQuery(query); // workingSet = createWorkingSet(query); // // TasksUiPlugin.getTaskListManager().getTaskList().deleteQuery(query); // query = new MockRepositoryQuery("newDescription"); // TasksUiPlugin.getTaskListManager().getTaskList().addQuery(query); // assertTrue(Arrays.asList(workingSet.getElements()).contains(query)); // } // // public void testEditQuery() { // MockRepositoryQuery query = new MockRepositoryQuery("description"); // TaskList taskList = TasksUiPlugin.getTaskListManager().getTaskList(); // taskList.addQuery(query); // workingSet = createWorkingSet(query); // // TasksUiPlugin.getTaskListManager().getTaskList().deleteQuery(query); // TasksUiPlugin.getTaskListManager().getTaskList().addQuery(query); // assertTrue(Arrays.asList(workingSet.getElements()).contains(query)); // } public void testRenameProject() throws Exception { createProject("Test Rename"); workingSet = createWorkingSet(project); WorkspaceModifyOperation op = new WorkspaceModifyOperation() { @Override protected void execute(IProgressMonitor monitor) throws CoreException, InvocationTargetException, InterruptedException { IProjectDescription description = new ProjectDescription(); description.setName("New Name"); project.move(description, true, new NullProgressMonitor()); } }; op.run(new NullProgressMonitor()); IProject oldProject = root.getProject("Test Rename"); IProject newProject = root.getProject("New Name"); assertFalse(Arrays.asList(workingSet.getElements()).contains(oldProject)); assertTrue(Arrays.asList(workingSet.getElements()).contains(newProject)); } private void createProject(String name) throws CoreException { project = root.getProject(name); project.create(null); project.open(null); } private IWorkingSet createWorkingSet(IAdaptable element) { IWorkingSet workingSet = workingSetManager.createWorkingSet("Task Working Set", new IAdaptable[] { element }); workingSet.setId(TaskWorkingSetUpdater.ID_TASK_WORKING_SET); assertTrue(Arrays.asList(workingSet.getElements()).contains(element)); workingSetManager.addWorkingSet(workingSet); return workingSet; } }
org.eclipse.mylyn.tasks.tests/src/org/eclipse/mylyn/tasks/tests/TaskWorkingSetTest.java
/******************************************************************************* * Copyright (c) 2004, 2007 Mylyn project committers and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html *******************************************************************************/ package org.eclipse.mylyn.tasks.tests; import java.lang.reflect.InvocationTargetException; import java.util.Arrays; import junit.framework.TestCase; import org.eclipse.core.internal.resources.ProjectDescription; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IProjectDescription; import org.eclipse.core.resources.IWorkspaceRoot; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IAdaptable; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.mylyn.internal.tasks.ui.workingsets.TaskWorkingSetUpdater; import org.eclipse.mylyn.resources.tests.ResourceTestUtil; import org.eclipse.ui.IWorkingSet; import org.eclipse.ui.IWorkingSetManager; import org.eclipse.ui.actions.WorkspaceModifyOperation; import org.eclipse.ui.internal.Workbench; public class TaskWorkingSetTest extends TestCase { private IProject project; private IWorkspaceRoot root; private IWorkingSet workingSet; private IWorkingSetManager workingSetManager; @Override protected void setUp() throws Exception { workingSetManager = Workbench.getInstance().getWorkingSetManager(); root = ResourcesPlugin.getWorkspace().getRoot(); } @Override protected void tearDown() throws Exception { if (workingSet != null) { workingSetManager.removeWorkingSet(workingSet); } if (project != null) { ResourceTestUtil.deleteProject(project); } } // XXX see bug 212962 // public void testRenameQuery() { // MockRepositoryQuery query = new MockRepositoryQuery("description"); // TaskList taskList = TasksUiPlugin.getTaskListManager().getTaskList(); // taskList.addQuery(query); // workingSet = createWorkingSet(query); // // TasksUiPlugin.getTaskListManager().getTaskList().deleteQuery(query); // query = new MockRepositoryQuery("newDescription"); // TasksUiPlugin.getTaskListManager().getTaskList().addQuery(query); // assertTrue(Arrays.asList(workingSet.getElements()).contains(query)); // } // // public void testEditQuery() { // MockRepositoryQuery query = new MockRepositoryQuery("description"); // TaskList taskList = TasksUiPlugin.getTaskListManager().getTaskList(); // taskList.addQuery(query); // workingSet = createWorkingSet(query); // // TasksUiPlugin.getTaskListManager().getTaskList().deleteQuery(query); // TasksUiPlugin.getTaskListManager().getTaskList().addQuery(query); // assertTrue(Arrays.asList(workingSet.getElements()).contains(query)); // } public void testRenameProject() throws Exception { createProject("Test Rename"); workingSet = createWorkingSet(project); WorkspaceModifyOperation op = new WorkspaceModifyOperation() { @Override protected void execute(IProgressMonitor monitor) throws CoreException, InvocationTargetException, InterruptedException { IProjectDescription description = new ProjectDescription(); description.setName("New Name"); project.move(description, true, new NullProgressMonitor()); } }; op.run(new NullProgressMonitor()); IProject oldProject = root.getProject("Test Rename"); IProject newProject = 
root.getProject("New Name"); assertFalse(Arrays.asList(workingSet.getElements()).contains(oldProject)); assertTrue(Arrays.asList(workingSet.getElements()).contains(newProject)); } private void createProject(String name) throws CoreException { project = root.getProject(name); project.create(null); project.open(null); } private IWorkingSet createWorkingSet(IAdaptable element) { IWorkingSet workingSet = workingSetManager.createWorkingSet("Task Working Set", new IAdaptable[] { element }); workingSet.setId(TaskWorkingSetUpdater.ID_TASK_WORKING_SET); assertTrue(Arrays.asList(workingSet.getElements()).contains(element)); workingSetManager.addWorkingSet(workingSet); return workingSet; } }
ASSIGNED - bug 221628: [patch] add tests for TaskWorkingSetTest https://bugs.eclipse.org/bugs/show_bug.cgi?id=221628
org.eclipse.mylyn.tasks.tests/src/org/eclipse/mylyn/tasks/tests/TaskWorkingSetTest.java
ASSIGNED - bug 221628: [patch] add tests for TaskWorkingSetTest https://bugs.eclipse.org/bugs/show_bug.cgi?id=221628
<ide><path>org.eclipse.mylyn.tasks.tests/src/org/eclipse/mylyn/tasks/tests/TaskWorkingSetTest.java <ide> import org.eclipse.core.runtime.NullProgressMonitor; <ide> import org.eclipse.mylyn.internal.tasks.ui.workingsets.TaskWorkingSetUpdater; <ide> import org.eclipse.mylyn.resources.tests.ResourceTestUtil; <add>import org.eclipse.mylyn.tasks.core.TaskList; <add>import org.eclipse.mylyn.tasks.tests.connector.MockRepositoryQuery; <add>import org.eclipse.mylyn.tasks.ui.TasksUiPlugin; <ide> import org.eclipse.ui.IWorkingSet; <ide> import org.eclipse.ui.IWorkingSetManager; <ide> import org.eclipse.ui.actions.WorkspaceModifyOperation; <ide> if (project != null) { <ide> ResourceTestUtil.deleteProject(project); <ide> } <add> } <add> <add> public void testDeleteQuery() { <add> MockRepositoryQuery query = new MockRepositoryQuery("description"); <add> TaskList taskList = TasksUiPlugin.getTaskListManager().getTaskList(); <add> taskList.addQuery(query); <add> workingSet = createWorkingSet(query); <add> assertTrue(Arrays.asList(workingSet.getElements()).contains(query)); <add> TasksUiPlugin.getTaskListManager().getTaskList().deleteQuery(query); <add> assertFalse(Arrays.asList(workingSet.getElements()).contains(query)); <add> } <add> <add> public void testRenameQuery() { <add> MockRepositoryQuery query = new MockRepositoryQuery("description"); <add> TaskList taskList = TasksUiPlugin.getTaskListManager().getTaskList(); <add> taskList.addQuery(query); <add> workingSet = createWorkingSet(query); <add> assertTrue(workingSet.getElements().length == 1); <add> IAdaptable[] elements = workingSet.getElements(); <add> assertTrue(elements.length == 1); <add> assertTrue(elements[0] instanceof MockRepositoryQuery); <add> assertTrue(((MockRepositoryQuery) elements[0]).getHandleIdentifier().equals("description")); <add> assertTrue(Arrays.asList(workingSet.getElements()).contains(query)); <add> <add> query.setHandleIdentifier("Test"); <add> assertTrue(workingSet.getElements().length == 1); <add> elements = workingSet.getElements(); <add> assertTrue(elements.length == 1); <add> assertTrue(elements[0] instanceof MockRepositoryQuery); <add> assertTrue(((MockRepositoryQuery) elements[0]).getHandleIdentifier().equals("Test")); <add> assertTrue(Arrays.asList(workingSet.getElements()).contains(query)); <ide> } <ide> <ide> // XXX see bug 212962
JavaScript
mit
7504d48baa238a02225edb9ac1bb5c5a9d158d91
0
fjsantosb/Molecule
Molecule.module('Molecule.Game', function (require, p) { var MapFile = require('Molecule.MapFile'), Camera = require('Molecule.Camera'), Scene = require('Molecule.Scene'), Map = require('Molecule.Map'), ImageFile = require('Molecule.ImageFile'), AudioFile = require('Molecule.AudioFile'), Input = require('Molecule.Input'), Text = require('Molecule.Text'), physics = require('Molecule.Physics'), move = require('Molecule.Move'), calculateSpriteCollisions = require('Molecule.SpriteCollisions'), calculateMapCollisions = require('Molecule.MapCollisions'), Sprite = require('Molecule.Sprite'), SpriteSheetFile = require('Molecule.SpriteSheetFile'), Molecule = require('Molecule.Molecule'), utils = require('Molecule.utils'); p.init = null; // Objects defined inline // game.object.define('Something', {}); p.inlineMolecules = { }; p.updateGame = function () { }; p.update = function (_exit, game) { var sprite; for (var i = 0; i < game.scene.sprites.length; i++) { sprite = game.scene.sprites[i]; sprite.update(); sprite.flipUpdate(); if (sprite.animation !== null && _exit) sprite.animation.nextFrame(); } if (game.map) { game.map.update(); } }; p.loadResources = function (_interval, game) { var total = game.imageFile.data.length + game.mapFile.maps.length + game.audioFile.data.length + game.spriteSheetFile.data.length; var total_loaded = game.imageFile.counter + game.mapFile.getCounter() + game.audioFile.counter + game.spriteSheetFile.getCounter(); if (game.imageFile.isLoaded() && game.mapFile.isLoaded() && game.audioFile.isLoaded() && game.spriteSheetFile.isLoaded()) { clearInterval(_interval); for (var i = 0; i < game.scene.sprites.length; i++) { game.scene.sprites[i].getAnimation(); } p.init(); p.loop(game); } game.context.save(); game.context.fillStyle = '#f8f8f8'; game.context.fillRect(30, Math.round(game.height / 1.25), (game.width - (30 * 2)), 16); game.context.fillStyle = '#ea863a'; game.context.fillRect(30, Math.round(game.height / 1.25), (game.width - (30 * 2)) * (total_loaded / total), 16); game.context.restore(); }; p.removeSprites = function (sprites) { for (var i = sprites.length - 1; i >= 0; i--) { if (sprites[i].kill) { sprites.splice(i, 1); } } }; p.resetCollisionState = function (sprites) { var sprite; for (var i = 0; i < sprites.length; i++) { sprite = sprites[i]; sprite.collision.sprite.id = null; sprite.collision.sprite.left = false; sprite.collision.sprite.right = false; sprite.collision.sprite.up = false; sprite.collision.sprite.down = false; sprite.collision.map.tile = null; sprite.collision.map.left = false; sprite.collision.map.right = false; sprite.collision.map.up = false; sprite.collision.map.down = false; sprite.collision.boundaries.id = null; sprite.collision.boundaries.left = false; sprite.collision.boundaries.right = false; sprite.collision.boundaries.up = false; sprite.collision.boundaries.down = false; } }; p.updateMolecules = function (game) { var molecule; for (var i = 0; i < game.scene.molecules.length; i++) { molecule = game.scene.molecules[i]; if (molecule.update) molecule.update(); } }; p.updateDrawMolecules = function (game) { var molecule; for (var i = 0; i < game.scene.molecules.length; i++) { molecule = game.scene.molecules[i]; if (molecule.draw) molecule.draw(); } }; p.loop = function (game) { game.input.checkGamepad(); p.removeSprites(game.scene.sprites); p.updateMolecules(game); p.update(null, game); if (game.status == 1) { var exit = false; physics(game); p.resetCollisionState(game.scene.sprites); while (!exit) { exit = move(game.scene.sprites); 
p.checkBoundaries(game); calculateMapCollisions(game); calculateSpriteCollisions(game); p.updateSpriteCollisionCheck(game.scene.sprites); if (game.camera.type === 1) { game.camera.update(game.scene.sprites); } p.update(exit, game); game.resetMove(); } } p.draw(game); p.updateDrawMolecules(game); p.updateGame(); p.requestAnimFrame(function () { p.loop(game); }); }; p.updateSpriteCollisionCheck = function (sprites) { var sprite; for (var i = 0; i < sprites.length; i++) { sprite = sprites[i]; if (sprite.speed.check.x && sprite.speed.check.y) { sprite.resetMove(); } } }; p.checkBoundaries = function (game) { var sprite; for (var i = 0; i < game.scene.sprites.length; i++) { sprite = game.scene.sprites[i]; if (game.boundaries.x !== null && sprite.collides.boundaries) { if (sprite.position.x - sprite.anchor.x + sprite.frame.offset.x + sprite.move.x < game.boundaries.x) { sprite.position.x = game.boundaries.x + sprite.anchor.x - sprite.frame.offset.x; sprite.collision.boundaries.left = true; sprite.collision.boundaries.id = 0; sprite.move.x = 0; sprite.speed.x = 0; sprite.speed.t.x = 0; if (game.physics.gravity.x < 0) { sprite.speed.gravity.x = 0; } } if (sprite.position.x + sprite.frame.width - sprite.anchor.x - sprite.frame.offset.x + sprite.move.x > game.boundaries.x + game.boundaries.width) { sprite.position.x = game.boundaries.x + game.boundaries.width - sprite.frame.width + sprite.anchor.x + sprite.frame.offset.x; sprite.collision.boundaries.right = true; sprite.collision.boundaries.id = 1; sprite.move.x = 0; sprite.speed.x = 0; sprite.speed.t.x = 0; if (game.physics.gravity.x > 0) { sprite.speed.gravity.x = 0; } } } if (game.boundaries.y !== null && sprite.collides.boundaries) { if (sprite.position.y - sprite.anchor.y + sprite.frame.offset.y + sprite.move.y < game.boundaries.y) { sprite.position.y = game.boundaries.y + sprite.anchor.y - sprite.frame.offset.y; sprite.collision.boundaries.up = true; sprite.collision.boundaries.id = 2; sprite.move.y = 0; sprite.speed.y = 0; sprite.speed.t.y = 0; if (game.physics.gravity.y < 0) { sprite.speed.gravity.y = 0; } } if (sprite.position.y + sprite.frame.height - sprite.anchor.y - sprite.frame.offset.y + sprite.move.y > game.boundaries.y + game.boundaries.height) { sprite.position.y = game.boundaries.y + game.boundaries.height - sprite.frame.height + sprite.anchor.y + sprite.frame.offset.y; sprite.collision.boundaries.down = true; sprite.collision.boundaries.id = 3; sprite.move.y = 0; sprite.speed.y = 0; sprite.speed.t.y = 0; if (game.physics.gravity.y > 0) { sprite.speed.gravity.y = 0; } } } } }; p.draw = function (game) { var i; game.context.clearRect(0, 0, game.width, game.height); if (game.map && game.map.visible) { game.map.draw(false); } for (i = 0; i < game.scene.sprites.length; i++) { if (game.scene.sprites[i].visible) { game.scene.sprites[i].draw(false); } } for (i = 0; i < game.scene.sprites.length; i++) { if (game.scene.sprites[i].visible) { game.scene.sprites[i].draw(true); } } if (game.map && game.map.visible) { game.map.draw(true); } for (i = 0; i < game.scene.text.length; i++) { if (game.scene.text[i].visible) { game.scene.text[i].draw(); } } }; p.requestAnimFrame = (function () { var requestAnimFrame = window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame || window.oRequestAnimationFrame || window.msRequestAnimationFrame || function (callback) { window.setTimeout(callback, 1000 / 60) }; return requestAnimFrame.bind(window); })(); p.start = function (game) { var interval = 
setInterval(function () { p.loadResources(interval, game); }, 100); }; p.propertiesMatch = function (obj, props) { var matches = true; if (!props) { return true; } for (var prop in props) { if (props.hasOwnProperty(prop) && obj[prop] !== props[prop]) { matches = false; } } return matches; }; p.timeouts = []; var Game = function (options) { // PROPERTIES this.canvas = null; this.context = null; this.next = {scene: null, fade: null}; this.status = 1; this.timer = {loop: 60 / 1000, previus: null, now: null, fps: 60, frame: 0}; this.sounds = {}; this.sprites = {}; this.tilemaps = {}; this.globals = options.globals || {}; this.node = options.node; // OPTIONS this.smooth = options.smooth || false; this.scale = options.scale || 1; this.width = options.width; this.height = options.height; this.canvas = document.createElement('canvas'); this.canvas.setAttribute('id', 'canvas'); this.context = this.canvas.getContext('2d'); var devicePixelRatio = window.devicePixelRatio || 1; var backingStoreRatio = this.context.webkitBackingStorePixelRatio || this.context.mozBackingStorePixelRatio || this.context.msBackingStorePixelRatio || this.context.oBackingStorePixelRatio || this.context.backingStorePixelRatio || 1; var ratio = devicePixelRatio / backingStoreRatio; // CANVAS this.canvas.width = options.width * ratio * this.scale; this.canvas.height = options.height * ratio * this.scale; this.canvas.style.width = (options.width * this.scale) + "px"; this.canvas.style.height = (options.height * this.scale) + "px"; this.context.scale(ratio * this.scale, ratio * this.scale); this.context.imageSmoothingEnabled = this.smooth; this.context.mozImageSmoothingEnabled = this.smooth; this.context.oImageSmoothingEnabled = this.smooth; this.context.webkitImageSmoothingEnabled = this.smooth; this.context.msImageSmoothingEnabled = this.smooth; // GAME COMPONENTS this.camera = new Camera(this); this.scene = new Scene(this); this.map = new Map(this); this.input = new Input(this); // ASSET LOADING this.imageFile = new ImageFile(this); this.audioFile = new AudioFile(this); this.mapFile = new MapFile(this); this.spriteSheetFile = new SpriteSheetFile(this); // GAME SETTINGS this.physics = {gravity: {x: 0, y: 0}, friction: {x: 0, y: 0}}; this.boundaries = {x: null, y: null, width: null, height: null}; // BINDERS utils.bindMethods(this.molecule, this); utils.bindMethods(this.sprite, this); utils.bindMethods(this.text, this); utils.bindMethods(this.tilemap, this); utils.bindMethods(this.audio, this); this.node ? 
document.getElementById(this.node).appendChild(this.canvas) : document.body.appendChild(this.canvas); }; Game.prototype.audio = { create: function (_id) { if (utils.isString(_id) && this.sounds[_id]) { return this.sounds[_id].clone(); } else { throw new Error('No audio loaded with the name ' + _id); } } }; // TODO: Should not be able to add objects more than once Game.prototype.add = function (obj) { if (arguments.length === 0 || arguments.length > 1 || typeof arguments[0] === 'string') { throw new Error('You can only add a single sprite, Molecule Object or text, use respective game.sprite.add, game.object.add and game.text.add'); } if (obj instanceof Molecule) { return this.molecule.add(obj); } if (obj instanceof Sprite) { return this.sprite.add(obj) } if (obj instanceof Text) { return this.text.add(obj); } if (typeof obj === 'function') { // Constructor return this.molecule.add(obj); } throw new Error('You did not pass sprite, Molecule Object or text'); }; Game.prototype.get = function () { return { sprites: this.scene.sprites, molecules: this.scene.molecules, text: this.scene.text }; }; Game.prototype.remove = function (obj) { if (arguments.length === 0 || arguments.length > 1) { throw new Error('You can only remove a single sprite, Molecule Object or text'); } if (obj instanceof Molecule) { return this.molecule.remove(obj); } if (obj instanceof Sprite) { return this.sprite.remove(obj) } if (obj instanceof Text) { return this.text.remove(obj); } throw new Error('You did not pass sprite, Molecule Object or text'); }; Game.prototype.is = function (obj, type) { return obj._MoleculeType === type; }; // Not in use, remove? Game.prototype.updateTimer = function () { this.timer.frame++; this.timer.now = new Date().getTime(); if (this.timer.previus !== null) this.timer.loop = (this.timer.now - this.timer.previus) / 1000; if (this.timer.now - this.timer.previus >= 1000) { this.timer.previus = this.timer.now; this.timer.fps = this.timer.frame; this.timer.frame = 0; } }; Game.prototype.play = function () { this.status = 1; }; Game.prototype.stop = function () { this.status = 0; }; Game.prototype.resetMove = function () { for (var i = 0; i < this.scene.sprites.length; i++) { this.scene.sprites[i].resetMove(); } if (this.map) { this.map.resetScroll(); } p.update(null, this); }; Game.prototype.cameraUpdate = function (_exit) { for (var i = 0; i < this.scene.sprites.length; i++) { this.scene.sprites[i].update(); this.scene.sprites[i].flipUpdate(); if (this.scene.sprites[i].animation !== null && _exit) this.scene.sprites[i].animation.nextFrame(); } if (this.map !== null) this.map.update(); }; Game.prototype.start = function () { p.start(this); }; Game.prototype.init = function (initializeModules, callback) { var self = this, object; p.init = function () { initializeModules(); // If callback is a string, require a module if (utils.isString(callback)) { object = require(callback); } else { // Callback might return an object (using ready method) object = callback.call(self.globals, self, require); } // If we have a Molecule Object constructor, add it to the game if (typeof object === 'function') { self.add(object); } } }; Game.prototype.update = function (callback) { p.updateGame = callback.bind(this.globals, this, require); }; // All methods are bound to game object Game.prototype.molecule = { define: function () { var name = arguments.length > 1 ? arguments[0] : null, options = arguments.length === 1 ? 
arguments[0] : arguments[1], Obj = Molecule.extend.call(Molecule, options); // No name means it is coming from a module if (!name) { return Obj; } if (!p.inlineMolecules[name]) { p.inlineMolecules[name] = Obj; } else { throw new Error(name + ' already exists as an object'); } return Obj; }, create: function () { var name = arguments[0], options = arguments[1], Obj, obj; // If passing a constructor if (typeof arguments[0] === 'function') { return new arguments[0](arguments[1]); } if (p.inlineMolecules[name]) { Obj = p.inlineMolecules[name]; } else { Obj = require(name); } obj = new Obj(options); obj._MoleculeType = name; return obj; }, add: function () { var obj; if (typeof arguments[0] === 'string') { obj = this.molecule.create(arguments[0], arguments[1]); } else if (utils.isMolecule(arguments[0])) { obj = arguments[0]; } else if (typeof arguments[0] === 'function') { // constructor obj = this.molecule.create(arguments[0], arguments[1]); } else { throw new Error('Wrong parameters, need a string or Molecule Object'); } this.scene.molecules.push(obj); if (obj.text) { for (var text in obj.text) { if (obj.text.hasOwnProperty(text)) { this.scene.text.push(obj.text[text]); } } } if (obj.sprite) { this.scene.sprites.push(obj.sprite); } else if (obj.sprites) { for (var sprite in obj.sprites) { if (obj.sprites.hasOwnProperty(sprite) && obj.sprites[sprite]) { this.scene.sprites.push(obj.sprites[sprite]); } } } return obj; }, get: function () { var options; if (!arguments.length) { return this.scene.molecules; } if (typeof arguments[0] === 'string') { options = arguments[1] || {}; options._MoleculeType = arguments[0]; return utils.find(this.scene.molecules, options); } else { return utils.find(this.scene.molecules, arguments[0]); } }, remove: function () { var moleculesToRemove = arguments[0] instanceof Molecule ? [arguments[0]] : this.molecule.get.apply(this, arguments), game = this; moleculesToRemove.forEach(function (obj) { obj.removeListeners(); game.scene.molecules.splice(game.scene.molecules.indexOf(obj), 1); if (obj.sprite) { game.scene.sprites.splice(game.scene.sprites.indexOf(obj.sprite), 1); } else if (obj.sprites) { for (var sprite in obj.sprites) { if (obj.sprites.hasOwnProperty(sprite) && obj.sprites[sprite]) { game.scene.sprites.splice(game.scene.sprites.indexOf(obj.sprites[sprite]), 1); } } } if (obj.text) { for (var text in obj.text) { if (obj.text.hasOwnProperty(text)) { game.scene.text.splice(game.scene.text.indexOf(obj.text[text]), 1); } } } if (obj.audio) { for (var audio in obj.audio) { if (obj.audio.hasOwnProperty(audio)) { obj.audio[audio].stop(); } } } }); } }; // All methods are bound to game object Game.prototype.sprite = { create: function (_id) { var loadedSprite, sprite; if (this.sprites[_id]) { loadedSprite = this.sprites[_id]; sprite = loadedSprite.clone(); } else { throw new Error('Sprite ' + _id + ' does not exist. 
Has it been loaded?'); } return sprite; }, add: function () { var sprite; if (typeof arguments[0] === 'string') { sprite = this.sprite.create(arguments[0]); } else if (utils.isSprite(arguments[0])) { sprite = arguments[0]; } else { throw new Error('Wrong parameters, need a string or sprite'); } this.scene.sprites.push(sprite); return sprite; }, get: function () { var options; if (!arguments.length) { return this.scene.sprites; } if (typeof arguments[0] === 'string') { options = { name: arguments[0] }; return utils.find(this.scene.sprites, options); } else { return utils.find(this.scene.sprites, arguments[0]); } }, remove: function () { var spritesToRemove = arguments[0] instanceof Sprite ? [arguments[0]] : this.sprite.get.apply(this, arguments), game = this; spritesToRemove.forEach(function (sprite) { game.scene.sprites.splice(game.scene.sprites.indexOf(sprite), 1); }); } }; // All methods are bound to game object Game.prototype.text = { create: function (options) { var t = new Text(options, this); return t; }, add: function () { var text; if (utils.isText(arguments[0])) { text = arguments[0]; } else if (utils.isObject(arguments[0])) { text = this.text.create(arguments[0]); } else { throw new Error('Wrong parameters, need a new object or existing Text object'); } this.scene.text.push(text); return text; }, get: function () { if (!arguments.length) { return this.scene.text; } return utils.find(this.scene.text, arguments[0]); }, remove: function () { var textToRemove = arguments[0] instanceof Text ? [arguments[0]] : this.text.get.apply(this, arguments), game = this; textToRemove.forEach(function (text) { game.scene.text.splice(game.scene.text.indexOf(text), 1); }); } }; // All methods are bound to game object Game.prototype.tilemap = { set: function () { var tilemap = this.tilemaps[arguments[0]] || arguments[0], self = this; if (tilemap && utils.isTilemap(tilemap)) { if (this.map && this.map.molecules.length) { this.map.molecules.forEach(function (object) { self.remove(object) }); } this.mapFile.set(tilemap); } else { throw new Error('There is no tilemap with the name ' + arguments[0] + ' loaded'); } }, get: function () { return this.map; }, remove: function () { var self = this; if (this.map && this.map.molecules.length) { this.map.molecules.forEach(function (object) { self.remove(object) }); } this.map = null; } }; Game.prototype.trigger = function () { var type = arguments[0], args = Array.prototype.slice.call(arguments, 0), event; args.splice(0, 1); if (!document.createEvent) { event = new CustomEvent(type, { detail: args }); } else { event = document.createEvent('CustomEvent'); event.initCustomEvent(type, true, true, args); } window.dispatchEvent(event); }; Game.prototype.timeout = function (func, ms, context) { var funcString = func.toString(); if (p.timeouts.indexOf(funcString) === -1) { setTimeout(function () { p.timeouts.splice(p.timeouts.indexOf(funcString), 1); func.call(context); }, ms); p.timeouts.push(funcString); } }; // Game.prototype.cancelRequestAnimFrame = (function () { // return window.cancelAnimationFrame || window.webkitCancelRequestAnimationFrame || window.mozCancelRequestAnimationFrame || window.oCancelRequestAnimationFrame || window.msCancelRequestAnimationFrame || clearTimeout // })(); return Game; });
src/game.js
Molecule.module('Molecule.Game', function (require, p) { var MapFile = require('Molecule.MapFile'), Camera = require('Molecule.Camera'), Scene = require('Molecule.Scene'), Map = require('Molecule.Map'), ImageFile = require('Molecule.ImageFile'), AudioFile = require('Molecule.AudioFile'), Input = require('Molecule.Input'), Text = require('Molecule.Text'), physics = require('Molecule.Physics'), move = require('Molecule.Move'), calculateSpriteCollisions = require('Molecule.SpriteCollisions'), calculateMapCollisions = require('Molecule.MapCollisions'), Sprite = require('Molecule.Sprite'), SpriteSheetFile = require('Molecule.SpriteSheetFile'), Molecule = require('Molecule.Molecule'), utils = require('Molecule.utils'); p.init = null; // Objects defined inline // game.object.define('Something', {}); p.inlineMolecules = { }; p.updateGame = function () { }; p.update = function (_exit, game) { var sprite; for (var i = 0; i < game.scene.sprites.length; i++) { sprite = game.scene.sprites[i]; sprite.update(); sprite.flipUpdate(); if (sprite.animation !== null && _exit) sprite.animation.nextFrame(); } if (game.map) { game.map.update(); } }; p.loadResources = function (_interval, game) { var total = game.imageFile.data.length + game.mapFile.maps.length + game.audioFile.data.length + game.spriteSheetFile.data.length; var total_loaded = game.imageFile.counter + game.mapFile.getCounter() + game.audioFile.counter + game.spriteSheetFile.getCounter(); if (game.imageFile.isLoaded() && game.mapFile.isLoaded() && game.audioFile.isLoaded() && game.spriteSheetFile.isLoaded()) { clearInterval(_interval); for (var i = 0; i < game.scene.sprites.length; i++) { game.scene.sprites[i].getAnimation(); } p.init(); p.loop(game); } game.context.save(); game.context.fillStyle = '#f8f8f8'; game.context.fillRect(30, Math.round(game.height / 1.25), (game.width - (30 * 2)), 16); game.context.fillStyle = '#ea863a'; game.context.fillRect(30, Math.round(game.height / 1.25), (game.width - (30 * 2)) * (total_loaded / total), 16); game.context.restore(); }; p.removeSprites = function (sprites) { for (var i = sprites.length - 1; i >= 0; i--) { if (sprites[i].kill) { sprites.splice(i, 1); } } }; p.resetCollisionState = function (sprites) { var sprite; for (var i = 0; i < sprites.length; i++) { sprite = sprites[i]; sprite.collision.sprite.id = null; sprite.collision.sprite.left = false; sprite.collision.sprite.right = false; sprite.collision.sprite.up = false; sprite.collision.sprite.down = false; sprite.collision.map.tile = null; sprite.collision.map.left = false; sprite.collision.map.right = false; sprite.collision.map.up = false; sprite.collision.map.down = false; sprite.collision.boundaries.id = null; sprite.collision.boundaries.left = false; sprite.collision.boundaries.right = false; sprite.collision.boundaries.up = false; sprite.collision.boundaries.down = false; } }; p.updateMolecules = function (game) { var molecule; for (var i = 0; i < game.scene.molecules.length; i++) { molecule = game.scene.molecules[i]; if (molecule.update) molecule.update(); } }; p.loop = function (game) { game.input.checkGamepad(); p.removeSprites(game.scene.sprites); p.update(null, game); if (game.status == 1) { var exit = false; physics(game); p.resetCollisionState(game.scene.sprites); while (!exit) { exit = move(game.scene.sprites); p.checkBoundaries(game); calculateMapCollisions(game); calculateSpriteCollisions(game); p.updateSpriteCollisionCheck(game.scene.sprites); if (game.camera.type === 1) { game.camera.update(game.scene.sprites); } p.update(exit, 
game); game.resetMove(); } } p.draw(game); p.updateMolecules(game); p.updateGame(); p.requestAnimFrame(function () { p.loop(game); }); }; p.updateSpriteCollisionCheck = function (sprites) { var sprite; for (var i = 0; i < sprites.length; i++) { sprite = sprites[i]; if (sprite.speed.check.x && sprite.speed.check.y) { sprite.resetMove(); } } }; p.checkBoundaries = function (game) { var sprite; for (var i = 0; i < game.scene.sprites.length; i++) { sprite = game.scene.sprites[i]; if (game.boundaries.x !== null && sprite.collides.boundaries) { if (sprite.position.x - sprite.anchor.x + sprite.frame.offset.x + sprite.move.x < game.boundaries.x) { sprite.position.x = game.boundaries.x + sprite.anchor.x - sprite.frame.offset.x; sprite.collision.boundaries.left = true; sprite.collision.boundaries.id = 0; sprite.move.x = 0; sprite.speed.x = 0; sprite.speed.t.x = 0; if (game.physics.gravity.x < 0) { sprite.speed.gravity.x = 0; } } if (sprite.position.x + sprite.frame.width - sprite.anchor.x - sprite.frame.offset.x + sprite.move.x > game.boundaries.x + game.boundaries.width) { sprite.position.x = game.boundaries.x + game.boundaries.width - sprite.frame.width + sprite.anchor.x + sprite.frame.offset.x; sprite.collision.boundaries.right = true; sprite.collision.boundaries.id = 1; sprite.move.x = 0; sprite.speed.x = 0; sprite.speed.t.x = 0; if (game.physics.gravity.x > 0) { sprite.speed.gravity.x = 0; } } } if (game.boundaries.y !== null && sprite.collides.boundaries) { if (sprite.position.y - sprite.anchor.y + sprite.frame.offset.y + sprite.move.y < game.boundaries.y) { sprite.position.y = game.boundaries.y + sprite.anchor.y - sprite.frame.offset.y; sprite.collision.boundaries.up = true; sprite.collision.boundaries.id = 2; sprite.move.y = 0; sprite.speed.y = 0; sprite.speed.t.y = 0; if (game.physics.gravity.y < 0) { sprite.speed.gravity.y = 0; } } if (sprite.position.y + sprite.frame.height - sprite.anchor.y - sprite.frame.offset.y + sprite.move.y > game.boundaries.y + game.boundaries.height) { sprite.position.y = game.boundaries.y + game.boundaries.height - sprite.frame.height + sprite.anchor.y + sprite.frame.offset.y; sprite.collision.boundaries.down = true; sprite.collision.boundaries.id = 3; sprite.move.y = 0; sprite.speed.y = 0; sprite.speed.t.y = 0; if (game.physics.gravity.y > 0) { sprite.speed.gravity.y = 0; } } } } }; p.draw = function (game) { var i; game.context.clearRect(0, 0, game.width, game.height); if (game.map && game.map.visible) { game.map.draw(false); } for (i = 0; i < game.scene.sprites.length; i++) { if (game.scene.sprites[i].visible) { game.scene.sprites[i].draw(false); } } for (i = 0; i < game.scene.sprites.length; i++) { if (game.scene.sprites[i].visible) { game.scene.sprites[i].draw(true); } } if (game.map && game.map.visible) { game.map.draw(true); } for (i = 0; i < game.scene.text.length; i++) { if (game.scene.text[i].visible) { game.scene.text[i].draw(); } } }; p.requestAnimFrame = (function () { var requestAnimFrame = window.requestAnimationFrame || window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame || window.oRequestAnimationFrame || window.msRequestAnimationFrame || function (callback) { window.setTimeout(callback, 1000 / 60) }; return requestAnimFrame.bind(window); })(); p.start = function (game) { var interval = setInterval(function () { p.loadResources(interval, game); }, 100); }; p.propertiesMatch = function (obj, props) { var matches = true; if (!props) { return true; } for (var prop in props) { if (props.hasOwnProperty(prop) && obj[prop] !== 
props[prop]) { matches = false; } } return matches; }; p.timeouts = []; var Game = function (options) { // PROPERTIES this.canvas = null; this.context = null; this.next = {scene: null, fade: null}; this.status = 1; this.timer = {loop: 60 / 1000, previus: null, now: null, fps: 60, frame: 0}; this.sounds = {}; this.sprites = {}; this.tilemaps = {}; this.globals = options.globals || {}; this.node = options.node; // OPTIONS this.smooth = options.smooth || false; this.scale = options.scale || 1; this.width = options.width; this.height = options.height; this.canvas = document.createElement('canvas'); this.canvas.setAttribute('id', 'canvas'); this.context = this.canvas.getContext('2d'); var devicePixelRatio = window.devicePixelRatio || 1; var backingStoreRatio = this.context.webkitBackingStorePixelRatio || this.context.mozBackingStorePixelRatio || this.context.msBackingStorePixelRatio || this.context.oBackingStorePixelRatio || this.context.backingStorePixelRatio || 1; var ratio = devicePixelRatio / backingStoreRatio; // CANVAS this.canvas.width = options.width * ratio * this.scale; this.canvas.height = options.height * ratio * this.scale; this.canvas.style.width = (options.width * this.scale) + "px"; this.canvas.style.height = (options.height * this.scale) + "px"; this.context.scale(ratio * this.scale, ratio * this.scale); this.context.imageSmoothingEnabled = this.smooth; this.context.mozImageSmoothingEnabled = this.smooth; this.context.oImageSmoothingEnabled = this.smooth; this.context.webkitImageSmoothingEnabled = this.smooth; this.context.msImageSmoothingEnabled = this.smooth; // GAME COMPONENTS this.camera = new Camera(this); this.scene = new Scene(this); this.map = new Map(this); this.input = new Input(this); // ASSET LOADING this.imageFile = new ImageFile(this); this.audioFile = new AudioFile(this); this.mapFile = new MapFile(this); this.spriteSheetFile = new SpriteSheetFile(this); // GAME SETTINGS this.physics = {gravity: {x: 0, y: 0}, friction: {x: 0, y: 0}}; this.boundaries = {x: null, y: null, width: null, height: null}; // BINDERS utils.bindMethods(this.molecule, this); utils.bindMethods(this.sprite, this); utils.bindMethods(this.text, this); utils.bindMethods(this.tilemap, this); utils.bindMethods(this.audio, this); this.node ? 
document.getElementById(this.node).appendChild(this.canvas) : document.body.appendChild(this.canvas); }; Game.prototype.audio = { create: function (_id) { if (utils.isString(_id) && this.sounds[_id]) { return this.sounds[_id].clone(); } else { throw new Error('No audio loaded with the name ' + _id); } } }; // TODO: Should not be able to add objects more than once Game.prototype.add = function (obj) { if (arguments.length === 0 || arguments.length > 1 || typeof arguments[0] === 'string') { throw new Error('You can only add a single sprite, Molecule Object or text, use respective game.sprite.add, game.object.add and game.text.add'); } if (obj instanceof Molecule) { return this.molecule.add(obj); } if (obj instanceof Sprite) { return this.sprite.add(obj) } if (obj instanceof Text) { return this.text.add(obj); } if (typeof obj === 'function') { // Constructor return this.molecule.add(obj); } throw new Error('You did not pass sprite, Molecule Object or text'); }; Game.prototype.get = function () { return { sprites: this.scene.sprites, molecules: this.scene.molecules, text: this.scene.text }; }; Game.prototype.remove = function (obj) { if (arguments.length === 0 || arguments.length > 1) { throw new Error('You can only remove a single sprite, Molecule Object or text'); } if (obj instanceof Molecule) { return this.molecule.remove(obj); } if (obj instanceof Sprite) { return this.sprite.remove(obj) } if (obj instanceof Text) { return this.text.remove(obj); } throw new Error('You did not pass sprite, Molecule Object or text'); }; Game.prototype.is = function (obj, type) { return obj._MoleculeType === type; }; // Not in use, remove? Game.prototype.updateTimer = function () { this.timer.frame++; this.timer.now = new Date().getTime(); if (this.timer.previus !== null) this.timer.loop = (this.timer.now - this.timer.previus) / 1000; if (this.timer.now - this.timer.previus >= 1000) { this.timer.previus = this.timer.now; this.timer.fps = this.timer.frame; this.timer.frame = 0; } }; Game.prototype.play = function () { this.status = 1; }; Game.prototype.stop = function () { this.status = 0; }; Game.prototype.resetMove = function () { for (var i = 0; i < this.scene.sprites.length; i++) { this.scene.sprites[i].resetMove(); } if (this.map) { this.map.resetScroll(); } p.update(null, this); }; Game.prototype.cameraUpdate = function (_exit) { for (var i = 0; i < this.scene.sprites.length; i++) { this.scene.sprites[i].update(); this.scene.sprites[i].flipUpdate(); if (this.scene.sprites[i].animation !== null && _exit) this.scene.sprites[i].animation.nextFrame(); } if (this.map !== null) this.map.update(); }; Game.prototype.start = function () { p.start(this); }; Game.prototype.init = function (initializeModules, callback) { var self = this, object; p.init = function () { initializeModules(); // If callback is a string, require a module if (utils.isString(callback)) { object = require(callback); } else { // Callback might return an object (using ready method) object = callback.call(self.globals, self, require); } // If we have a Molecule Object constructor, add it to the game if (typeof object === 'function') { self.add(object); } } }; Game.prototype.update = function (callback) { p.updateGame = callback.bind(this.globals, this, require); }; // All methods are bound to game object Game.prototype.molecule = { define: function () { var name = arguments.length > 1 ? arguments[0] : null, options = arguments.length === 1 ? 
arguments[0] : arguments[1], Obj = Molecule.extend.call(Molecule, options); // No name means it is coming from a module if (!name) { return Obj; } if (!p.inlineMolecules[name]) { p.inlineMolecules[name] = Obj; } else { throw new Error(name + ' already exists as an object'); } return Obj; }, create: function () { var name = arguments[0], options = arguments[1], Obj, obj; // If passing a constructor if (typeof arguments[0] === 'function') { return new arguments[0](arguments[1]); } if (p.inlineMolecules[name]) { Obj = p.inlineMolecules[name]; } else { Obj = require(name); } obj = new Obj(options); obj._MoleculeType = name; return obj; }, add: function () { var obj; if (typeof arguments[0] === 'string') { obj = this.molecule.create(arguments[0], arguments[1]); } else if (utils.isMolecule(arguments[0])) { obj = arguments[0]; } else if (typeof arguments[0] === 'function') { // constructor obj = this.molecule.create(arguments[0], arguments[1]); } else { throw new Error('Wrong parameters, need a string or Molecule Object'); } this.scene.molecules.push(obj); if (obj.text) { for (var text in obj.text) { if (obj.text.hasOwnProperty(text)) { this.scene.text.push(obj.text[text]); } } } if (obj.sprite) { this.scene.sprites.push(obj.sprite); } else if (obj.sprites) { for (var sprite in obj.sprites) { if (obj.sprites.hasOwnProperty(sprite) && obj.sprites[sprite]) { this.scene.sprites.push(obj.sprites[sprite]); } } } return obj; }, get: function () { var options; if (!arguments.length) { return this.scene.molecules; } if (typeof arguments[0] === 'string') { options = arguments[1] || {}; options._MoleculeType = arguments[0]; return utils.find(this.scene.molecules, options); } else { return utils.find(this.scene.molecules, arguments[0]); } }, remove: function () { var moleculesToRemove = arguments[0] instanceof Molecule ? [arguments[0]] : this.molecule.get.apply(this, arguments), game = this; moleculesToRemove.forEach(function (obj) { obj.removeListeners(); game.scene.molecules.splice(game.scene.molecules.indexOf(obj), 1); if (obj.sprite) { game.scene.sprites.splice(game.scene.sprites.indexOf(obj.sprite), 1); } else if (obj.sprites) { for (var sprite in obj.sprites) { if (obj.sprites.hasOwnProperty(sprite) && obj.sprites[sprite]) { game.scene.sprites.splice(game.scene.sprites.indexOf(obj.sprites[sprite]), 1); } } } if (obj.text) { for (var text in obj.text) { if (obj.text.hasOwnProperty(text)) { game.scene.text.splice(game.scene.text.indexOf(obj.text[text]), 1); } } } if (obj.audio) { for (var audio in obj.audio) { if (obj.audio.hasOwnProperty(audio)) { obj.audio[audio].stop(); } } } }); } }; // All methods are bound to game object Game.prototype.sprite = { create: function (_id) { var loadedSprite, sprite; if (this.sprites[_id]) { loadedSprite = this.sprites[_id]; sprite = loadedSprite.clone(); } else { throw new Error('Sprite ' + _id + ' does not exist. 
Has it been loaded?'); } return sprite; }, add: function () { var sprite; if (typeof arguments[0] === 'string') { sprite = this.sprite.create(arguments[0]); } else if (utils.isSprite(arguments[0])) { sprite = arguments[0]; } else { throw new Error('Wrong parameters, need a string or sprite'); } this.scene.sprites.push(sprite); return sprite; }, get: function () { var options; if (!arguments.length) { return this.scene.sprites; } if (typeof arguments[0] === 'string') { options = { name: arguments[0] }; return utils.find(this.scene.sprites, options); } else { return utils.find(this.scene.sprites, arguments[0]); } }, remove: function () { var spritesToRemove = arguments[0] instanceof Sprite ? [arguments[0]] : this.sprite.get.apply(this, arguments), game = this; spritesToRemove.forEach(function (sprite) { game.scene.sprites.splice(game.scene.sprites.indexOf(sprite), 1); }); } }; // All methods are bound to game object Game.prototype.text = { create: function (options) { var t = new Text(options, this); return t; }, add: function () { var text; if (utils.isText(arguments[0])) { text = arguments[0]; } else if (utils.isObject(arguments[0])) { text = this.text.create(arguments[0]); } else { throw new Error('Wrong parameters, need a new object or existing Text object'); } this.scene.text.push(text); return text; }, get: function () { if (!arguments.length) { return this.scene.text; } return utils.find(this.scene.text, arguments[0]); }, remove: function () { var textToRemove = arguments[0] instanceof Text ? [arguments[0]] : this.text.get.apply(this, arguments), game = this; textToRemove.forEach(function (text) { game.scene.text.splice(game.scene.text.indexOf(text), 1); }); } }; // All methods are bound to game object Game.prototype.tilemap = { set: function () { var tilemap = this.tilemaps[arguments[0]] || arguments[0], self = this; if (tilemap && utils.isTilemap(tilemap)) { if (this.map && this.map.molecules.length) { this.map.molecules.forEach(function (object) { self.remove(object) }); } this.mapFile.set(tilemap); } else { throw new Error('There is no tilemap with the name ' + arguments[0] + ' loaded'); } }, get: function () { return this.map; }, remove: function () { var self = this; if (this.map && this.map.molecules.length) { this.map.molecules.forEach(function (object) { self.remove(object) }); } this.map = null; } }; Game.prototype.trigger = function () { var type = arguments[0], args = Array.prototype.slice.call(arguments, 0), event; args.splice(0, 1); if (!document.createEvent) { event = new CustomEvent(type, { detail: args }); } else { event = document.createEvent('CustomEvent'); event.initCustomEvent(type, true, true, args); } window.dispatchEvent(event); }; Game.prototype.timeout = function (func, ms, context) { var funcString = func.toString(); if (p.timeouts.indexOf(funcString) === -1) { setTimeout(function () { p.timeouts.splice(p.timeouts.indexOf(funcString), 1); func.call(context); }, ms); p.timeouts.push(funcString); } }; // Game.prototype.cancelRequestAnimFrame = (function () { // return window.cancelAnimationFrame || window.webkitCancelRequestAnimationFrame || window.mozCancelRequestAnimationFrame || window.oCancelRequestAnimationFrame || window.msCancelRequestAnimationFrame || clearTimeout // })(); return Game; });
New Molecule method: draw
src/game.js
New Molecule method: draw
<ide><path>src/game.js <ide> if (molecule.update) molecule.update(); <ide> } <ide> }; <add> <add> p.updateDrawMolecules = function (game) { <add> var molecule; <add> for (var i = 0; i < game.scene.molecules.length; i++) { <add> molecule = game.scene.molecules[i]; <add> if (molecule.draw) molecule.draw(); <add> } <add> }; <ide> <ide> p.loop = function (game) { <ide> game.input.checkGamepad(); <ide> p.removeSprites(game.scene.sprites); <add> p.updateMolecules(game); <ide> p.update(null, game); <ide> if (game.status == 1) { <ide> var exit = false; <ide> } <ide> } <ide> p.draw(game); <del> p.updateMolecules(game); <add> p.updateDrawMolecules(game); <ide> p.updateGame(); <ide> <ide> p.requestAnimFrame(function () {
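The diff above splits molecule processing into two passes: update() now runs before the physics/collision step, while the new draw() hook runs after p.draw(game) has rendered the scene. Below is a minimal, hypothetical Java analogue of that two-phase loop; the Phased interface, TwoPhaseLoop class, and method names are illustrative only and are not part of the Molecule.js source.

    import java.util.ArrayList;
    import java.util.List;

    // Minimal sketch of a two-phase game loop: all logic updates run first, then all draw hooks.
    interface Phased {
        void update(); // game logic, runs before physics/collision resolution
        void draw();   // custom rendering, runs after the scene has been drawn
    }

    public class TwoPhaseLoop {
        private final List<Phased> molecules = new ArrayList<>();

        public void add(Phased m) { molecules.add(m); }

        public void tick() {
            for (Phased m : molecules) m.update(); // phase 1: logic
            // physics, collision resolution and scene rendering would happen here
            for (Phased m : molecules) m.draw();   // phase 2: drawing on top of the scene
        }

        public static void main(String[] args) {
            TwoPhaseLoop loop = new TwoPhaseLoop();
            loop.add(new Phased() {
                public void update() { System.out.println("update"); }
                public void draw() { System.out.println("draw"); }
            });
            loop.tick(); // prints "update" then "draw"
        }
    }

One likely reason for running the draw pass after the scene render is that a molecule can then overlay shapes or HUD elements without having them cleared by the main draw.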
Java
unlicense
error: pathspec 'p012/p12.java' did not match any file(s) known to git
b5c7b8d872966d17dfeff6d281dd5bb65bfff4b9
1
metacity/project-euler,metacity/project-euler,metacity/project-euler
import java.util.concurrent.*; import java.util.concurrent.atomic.*; public class p12 { private static final int THREADS = Runtime.getRuntime().availableProcessors(); private static final BlockingQueue<Long> queue = new ArrayBlockingQueue<Long>(THREADS * 2); private static final ExecutorService pool = Executors.newCachedThreadPool(); private static final AtomicBoolean foundIt = new AtomicBoolean(false); private static final int LIMIT = 500; private static long start; public static void main(String[] args) throws InterruptedException { setupThreads(); start = System.nanoTime(); long i = 0; long prev = 0; while (!foundIt.get()) { long number = prev + (++i); queue.put(number); prev = number; } } private static final void setupThreads() { System.out.println("Computing with " + THREADS + " threads..."); for (int i = 0; i < THREADS; ++i) { pool.execute(new Runnable() { public void run() { try { while (!foundIt.get()) { Long number = queue.take(); if (numberOfDivisors(number) > LIMIT) { foundIt.set(true); System.out.println(number); System.out.println((System.nanoTime() - start)/(double)1E6 + " milliseconds"); System.exit(0); } } } catch (InterruptedException iex) { System.out.println(iex.toString()); } } }); } } private static final int numberOfDivisors(long number) { int divisors = 2; for (long i = number - 1; i > 1; --i) { if (number % i == 0) ++divisors; } return divisors; } }
p012/p12.java
Added Problem 12
p012/p12.java
Added Problem 12
<ide><path>012/p12.java <add>import java.util.concurrent.*; <add>import java.util.concurrent.atomic.*; <add> <add>public class p12 { <add> <add> private static final int THREADS = Runtime.getRuntime().availableProcessors(); <add> <add> private static final BlockingQueue<Long> queue = new ArrayBlockingQueue<Long>(THREADS * 2); <add> private static final ExecutorService pool = Executors.newCachedThreadPool(); <add> private static final AtomicBoolean foundIt = new AtomicBoolean(false); <add> <add> private static final int LIMIT = 500; <add> <add> private static long start; <add> <add> public static void main(String[] args) throws InterruptedException { <add> setupThreads(); <add> <add> start = System.nanoTime(); <add> long i = 0; <add> long prev = 0; <add> while (!foundIt.get()) { <add> long number = prev + (++i); <add> queue.put(number); <add> prev = number; <add> } <add> } <add> <add> private static final void setupThreads() { <add> System.out.println("Computing with " + THREADS + " threads..."); <add> for (int i = 0; i < THREADS; ++i) { <add> pool.execute(new Runnable() { <add> public void run() { <add> try { <add> while (!foundIt.get()) { <add> Long number = queue.take(); <add> if (numberOfDivisors(number) > LIMIT) { <add> foundIt.set(true); <add> System.out.println(number); <add> System.out.println((System.nanoTime() - start)/(double)1E6 + " milliseconds"); <add> System.exit(0); <add> } <add> } <add> } catch (InterruptedException iex) { <add> System.out.println(iex.toString()); <add> } <add> } <add> }); <add> <add> } <add> <add> } <add> <add> private static final int numberOfDivisors(long number) { <add> int divisors = 2; <add> for (long i = number - 1; i > 1; --i) { <add> if (number % i == 0) ++divisors; <add> } <add> return divisors; <add> } <add>} <add>
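The numberOfDivisors helper in the commit above tests every candidate below the number, which is O(n) per triangle number. A common alternative, shown here only as a hedged sketch and not part of the commit, counts divisors in pairs up to the square root, which is O(sqrt(n)) per call:

    // Hypothetical alternative divisor count for Project Euler problem 12.
    public final class DivisorCount {

        static int numberOfDivisors(long number) {
            int divisors = 0;
            for (long i = 1; i * i <= number; ++i) {
                if (number % i == 0) {
                    divisors += 2;                   // counts both i and number / i
                    if (i * i == number) --divisors; // a perfect-square root is counted once
                }
            }
            return divisors;
        }

        public static void main(String[] args) {
            System.out.println(numberOfDivisors(28)); // 6 -> 1, 2, 4, 7, 14, 28
        }
    }

With this change the worker threads spend far less time per triangle number, so the producer/consumer setup in the commit reaches the 500-divisor limit much sooner.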
JavaScript
mit
8c3690b8f145c1cf8b400b41105f0711dc45babc
0
tomviner/unlimited-weeks-in-google-calendar,tomviner/unlimited-weeks-in-google-calendar,tomviner/unlimited-weeks-in-google-calendar
/* jshint esversion: 6 */ let BUTTON_CHECKED = 'goog-imageless-button-checked' let BUTTON_CONTENT = 'goog-imageless-button-content' function trigger(event_names, elem) { // event_names: space sep names of events // elem: jQuery element if (!event_names || event_names.length === 0) { throw (`Cannot trigger ${event_names}, element event_names`) } if (!elem || elem.length === 0) { throw (`Cannot trigger ${event_names}, element missing`) } for (let event_name of event_names.split(' ')) { let evt = document.createEvent("MouseEvents") // eek, there's gotta be a better way. // replace with https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/MouseEvent // https://developer.mozilla.org/en-US/docs/Web/Guide/Events/Creating_and_triggering_events evt.initMouseEvent(event_name, true, true, window, 0, 0, 0, 0, 0, false, false, false, false, 0, null) let dom_elem = elem.get(0) dom_elem.dispatchEvent(evt) } return elem } class Toolbar { get buttons() { return $('#topRightNavigation .goog-imageless-button') } get month_view() { return this.buttons.eq(2) } get custom_view() { return this.buttons.eq(3) } get prev_month() { return $('.navBack').eq(0) } get today() { return $('#todayButton\\:1,#todayButton\\:2').children().eq(0) } get next_month() { return $('#dp_0_next') } get is_custom_view_active() { return this.custom_view.is(`.${BUTTON_CHECKED}`) } poll_custom_button_visibility(wait_ms = 500) { if ( this.custom_view.is(":visible") && this.buttons.filter(`.${BUTTON_CHECKED}`).is(":visible") ) { $(document).trigger("custom_view_buttons_visible") } else { setTimeout((w) => this.poll_custom_button_visibility(w), wait_ms) } } inject_buttons() { toolbar.custom_view.after( function() { return $(this) .clone() .addClass('gcal-unlim-weeks-adjust-weeks') .addClass('gcal-unlim-weeks-remove-weeks') .find('.goog-imageless-button-content') .text('-') .end() .click(() => unlimited_weeks.remove_week()) } ).after( function() { return $(this) .clone() .addClass('gcal-unlim-weeks-adjust-weeks') .addClass('gcal-unlim-weeks-add-weeks') .find('.goog-imageless-button-content') .text('+') .end() .click(() => unlimited_weeks.add_week()) } ) $('.gcal-unlim-weeks-adjust-weeks') .removeClass(BUTTON_CHECKED) // replicate button behavior .mousedown(function(){$(this).addClass('goog-imageless-button-focused')}) .mouseup(function(){$(this).removeClass('goog-imageless-button-focused')}) .mouseenter(function(){$(this).addClass('goog-imageless-button-hover')}) .mouseleave(function(){$(this).removeClass('goog-imageless-button-hover')}) if (this.is_custom_view_active) { unlimited_weeks.restore_weeks() } else { unlimited_weeks.load_num_weeks().then( function(num_weeks) { unlimited_weeks.write_custom_button_label(num_weeks) }) } this.custom_view.click(() => { unlimited_weeks.restore_weeks() }) this.buttons.mousedown(() => { this.buttons.removeClass(BUTTON_CHECKED) }) } } class BigCal { get first_day_num() { if (!$('#gridcontainer span[class^="ca-cdp"]').is(':visible')) { // no grid, must be in agenda mode trigger('mousedown mouseup', toolbar.month_view) } return parseInt( // take a fresh look, in case it's only just appeared $('#gridcontainer span[class^="ca-cdp"]') .attr('class') .split('ca-cdp') .slice(-1)[0] ) } get num_weeks() { return $('.month-row').length } } class MiniCal { constructor(height = 6) { // in week rows this.height = height } get cells() { return $('.dp-cell[class*="dp-o"]') } nth(n) { return this.cells.eq(n) } get first() { return this.nth(0) } get last() { return this.nth(7 * this.height - 1) } extract_day_num(el) 
{ // Google Calendar seems to label each day with a monotonic number // that skips in an unknown way return parseInt(el.eq(0).attr('id').split('_').slice(-1)[0]) } get first_day_num() { return this.extract_day_num(this.first) } get last_day_num() { return this.extract_day_num(this.last) } get month_start_indexes() { // return the positions of the 1st of the current // and next months return this.cells.map((i, el) => { if ($(el).text() === '1') { return i } else { return null } }) } get current_month_start_index() { return this.month_start_indexes[0] } get next_month_start_index() { return this.month_start_indexes[1] } // current month may start in either first or second row get current_month_starts_high() { return this.current_month_start_index < 7 } // next month may start in either last or penultimate row get next_month_starts_low() { return this.next_month_start_index >= 7 * (this.height - 1) } get weeks_in_month() { // bools get cast to 0 or 1 here. each true is an extra week return 3 + this.current_month_starts_high + this.next_month_starts_low } cell_from_day_num(day_num) { return this.cells.filter(`[id$="${day_num}"]`) } month_backward() { trigger('mousedown mouseup', $('.dp-sb-prev')) } month_forward() { trigger('mousedown mouseup', $('.dp-sb-next')) } navigate_to_day_num(day_num) { let i = 0 while (day_num < this.first_day_num || this.last_day_num < day_num) { if (++i > 10) { throw "Too many iterations" } if (day_num < this.first_day_num) { this.month_backward() } else if (this.last_day_num < day_num) { this.month_forward() } else { throw 'unknown condition' } } if (this.cell_from_day_num(day_num).length != 1) { throw "target not found on mini cal" } } } class UnlimitedWeeks { add_week() { $('.gcal-unlim-weeks-add-weeks').addClass(BUTTON_CHECKED) this.alter_weeks(+1) } remove_week() { $('.gcal-unlim-weeks-remove-weeks').addClass(BUTTON_CHECKED) this.alter_weeks(-1) } get can_persist() { return chrome && chrome.storage && chrome.storage.sync } save_num_weeks() { if (this.can_persist) { chrome.storage.sync.set({ 'num_weeks': big_cal.num_weeks }) } } load_num_weeks() { // returns a promise if (!this.can_persist) { return new Promise((resolve) => resolve(big_cal.num_weeks)) } return new Promise(function(resolve){ chrome.storage.sync.get('num_weeks', function(data) { if ( $.isEmptyObject(data) || typeof data.num_weeks === 'number' || typeof data.num_weeks >= 2 ) { return resolve(data.num_weeks) } else { return resolve(big_cal.num_weeks) } }) }) } restore_weeks() { this.load_num_weeks().then( num_weeks => this.display_weeks(num_weeks) ) } allocate_weeks(weeks_left) { while (weeks_left > 0) { let weeks_in_month = mini_cal.weeks_in_month if (weeks_in_month > weeks_left) { break } weeks_left -= weeks_in_month mini_cal.month_forward() } if (weeks_left > 0) { return 7 * weeks_left } return 0 } get_start_cell() { let index = mini_cal.current_month_start_index + 7 return mini_cal.nth(index) } get_end_cell(days_remaining) { let index = days_remaining + mini_cal.current_month_start_index return mini_cal.nth(index) } move_weeks(start_day_num) { // move the calandar back to the date it started at // move active range forward, out the way mini_cal.month_forward() // we must click outside the active range, otherwise, we just select a single day trigger('mousedown mouseup', mini_cal.last) // now click the date we want, in the mini map mini_cal.navigate_to_day_num(start_day_num) trigger('mousedown mouseup', mini_cal.cell_from_day_num(start_day_num)) } write_custom_button_label(num_weeks=null) { 
toolbar.custom_view .find(`.${BUTTON_CONTENT}`) .text(`${num_weeks || big_cal.num_weeks} weeks`) } alter_weeks(delta) { if (toolbar.is_custom_view_active) { return this.display_weeks(big_cal.num_weeks + delta) } let that = this this.load_num_weeks().then(function(num_weeks) { that.display_weeks(num_weeks + delta) }) } display_weeks(weeks_left) { if (weeks_left < 2) { weeks_left = 2 } let target_start_day_num = big_cal.first_day_num // move to custom view, click doesn't work here trigger('mousedown mouseup', toolbar.custom_view) // ensure start date in visible in mini cal mini_cal.navigate_to_day_num(target_start_day_num) // do a double manoeuvre: click next month during a click drag over the mini calendar. // this is how we reach more than one month trigger('mousedown', this.get_start_cell()) let days_remaining = this.allocate_weeks(weeks_left) trigger('mousemove mouseup', this.get_end_cell(days_remaining)) this.write_custom_button_label() this.move_weeks(target_start_day_num) // preserve number of weeks for next page (re)load this.save_num_weeks() toolbar.custom_view.addClass(BUTTON_CHECKED) $('.gcal-unlim-weeks-adjust-weeks').removeClass(BUTTON_CHECKED) } } let demo = false let mini_cal = new MiniCal() let big_cal = new BigCal() let toolbar = new Toolbar() let unlimited_weeks = new UnlimitedWeeks() $(document) .on("custom_view_buttons_visible", function() { toolbar.inject_buttons() if (demo === true) { console.log('demo') setTimeout(unlimited_weeks.add_week, 1000) setTimeout(unlimited_weeks.add_week, 1500) setTimeout(unlimited_weeks.remove_week, 3000) } }) $(document).ready( function() { // triggers custom_view_buttons_visible event toolbar.poll_custom_button_visibility() })
src/inject/inject.js
/* jshint esversion: 6 */ let BUTTON_CHECKED = 'goog-imageless-button-checked' let BUTTON_CONTENT = 'goog-imageless-button-content' function trigger(event_names, elem) { // event_names: space sep names of events // elem: jQuery element if (!event_names || event_names.length === 0) { throw (`Cannot trigger ${event_names}, element event_names`) } if (!elem || elem.length === 0) { throw (`Cannot trigger ${event_names}, element missing`) } for (let event_name of event_names.split(' ')) { let evt = document.createEvent("MouseEvents") // eek, there's gotta be a better way. // replace with https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/MouseEvent // https://developer.mozilla.org/en-US/docs/Web/Guide/Events/Creating_and_triggering_events evt.initMouseEvent(event_name, true, true, window, 0, 0, 0, 0, 0, false, false, false, false, 0, null) let dom_elem = elem.get(0) dom_elem.dispatchEvent(evt) } return elem } class Toolbar { get buttons() { return $('#topRightNavigation .goog-imageless-button') } get month_view() { return this.buttons.eq(2) } get custom_view() { return this.buttons.eq(3) } get prev_month() { return $('.navBack').eq(0) } get today() { return $('#todayButton\\:1,#todayButton\\:2').children().eq(0) } get next_month() { return $('#dp_0_next') } get is_custom_view_active() { return this.custom_view.is(`.${BUTTON_CHECKED}`) } poll_custom_button_visibility(wait_ms = 500) { if ( this.custom_view.is(":visible") && this.buttons.filter(`.${BUTTON_CHECKED}`).is(":visible") ) { $(document).trigger("custom_view_buttons_visible") } else { setTimeout((w) => this.poll_custom_button_visibility(w), wait_ms) } } inject_buttons() { toolbar.custom_view.after( function() { return $(this) .clone() .addClass('gcal-unlim-weeks-adjust-weeks') .addClass('gcal-unlim-weeks-remove-weeks') .find('.goog-imageless-button-content') .text('-') .end() .click(() => unlimited_weeks.remove_week()) } ).after( function() { return $(this) .clone() .addClass('gcal-unlim-weeks-adjust-weeks') .addClass('gcal-unlim-weeks-add-weeks') .find('.goog-imageless-button-content') .text('+') .end() .click(() => unlimited_weeks.add_week()) } ) $('.gcal-unlim-weeks-adjust-weeks') .removeClass(BUTTON_CHECKED) // replicate button behavior .mousedown(function(){$(this).addClass('goog-imageless-button-focused')}) .mouseup(function(){$(this).removeClass('goog-imageless-button-focused')}) .mouseenter(function(){$(this).addClass('goog-imageless-button-hover')}) .mouseleave(function(){$(this).removeClass('goog-imageless-button-hover')}) if (this.is_custom_view_active) { unlimited_weeks.restore_weeks() } else { unlimited_weeks.load_num_weeks().then( function(num_weeks) { unlimited_weeks.write_custom_button_label(num_weeks) }) } this.custom_view.click(() => { unlimited_weeks.restore_weeks() }) this.buttons.mousedown(() => { this.buttons.removeClass(BUTTON_CHECKED) }) } } class BigCal { get first_day_num() { if (!$('#gridcontainer span[class^="ca-cdp"]').is(':visible')) { // no grid, must be in agenda mode trigger('mousedown mouseup', toolbar.month_view) } return parseInt( // take a fresh look, in case it's only just appeared $('#gridcontainer span[class^="ca-cdp"]') .attr('class') .split('ca-cdp') .slice(-1)[0] ) } get num_weeks() { return $('.month-row').length } } class MiniCal { constructor(height = 6) { // in week rows this.height = height } get cells() { return $('.dp-cell[class*="dp-o"]') } nth(n) { return this.cells.eq(n) } get first() { return this.nth(0) } get last() { return this.nth(7 * this.height - 1) } extract_day_num(el) 
{ // Google Calendar seems to label each day with a monotonic number // that skips in an unknown way return parseInt(el.eq(0).attr('id').split('_').slice(-1)[0]) } get first_day_num() { return this.extract_day_num(this.first) } get last_day_num() { return this.extract_day_num(this.last) } get month_start_indexes() { // return the positions of the 1st of the current // and next months return this.cells.map((i, el) => { if ($(el).text() === '1') { return i } else { return null } }) } get current_month_start_index() { return this.month_start_indexes[0] } get next_month_start_index() { return this.month_start_indexes[1] } // current month may start in either first or second row get current_month_starts_high() { return this.current_month_start_index < 7 } // next month may start in either last or penultimate row get next_month_starts_low() { return this.next_month_start_index >= 7 * (this.height - 1) } get weeks_in_month() { // bools get cast to 0 or 1 here. each true is an extra week return 3 + this.current_month_starts_high + this.next_month_starts_low } cell_from_day_num(day_num) { return this.cells.filter(`[id$="${day_num}"]`) } month_backward() { trigger('mousedown mouseup', $('.dp-sb-prev')) } month_forward() { trigger('mousedown mouseup', $('.dp-sb-next')) } navigate_to_day_num(day_num) { let i = 0 while (day_num < this.first_day_num || this.last_day_num < day_num) { if (++i > 10) { throw "Too many iterations" } if (day_num < this.first_day_num) { this.month_backward() } else if (this.last_day_num < day_num) { this.month_forward() } else { throw 'unknown condition' } } if (this.cell_from_day_num(day_num).length != 1) { throw "target not found on mini cal" } } } class UnlimitedWeeks { add_week() { $('.gcal-unlim-weeks-add-weeks').addClass(BUTTON_CHECKED) this.alter_weeks(+1) } remove_week() { $('.gcal-unlim-weeks-remove-weeks').addClass(BUTTON_CHECKED) this.alter_weeks(-1) } get can_persist() { return chrome && chrome.storage && chrome.storage.sync } save_num_weeks() { if (this.can_persist) { chrome.storage.sync.set({ 'num_weeks': big_cal.num_weeks }) } } load_num_weeks() { // returns a promise if (!this.can_persist) { return new Promise((resolve) => resolve(null)) } return new Promise(function(resolve){ chrome.storage.sync.get('num_weeks', function(data) { if ( $.isEmptyObject(data) || typeof data.num_weeks === 'number' || typeof data.num_weeks >= 2 ) { return resolve(data.num_weeks) } else { return resolve(big_cal.num_weeks) } }) }) } restore_weeks() { this.load_num_weeks().then( num_weeks => this.display_weeks(num_weeks) ) } allocate_weeks(weeks_left) { while (weeks_left > 0) { let weeks_in_month = mini_cal.weeks_in_month if (weeks_in_month > weeks_left) { break } weeks_left -= weeks_in_month mini_cal.month_forward() } if (weeks_left > 0) { return 7 * weeks_left } return 0 } get_start_cell() { let index = mini_cal.current_month_start_index + 7 return mini_cal.nth(index) } get_end_cell(days_remaining) { let index = days_remaining + mini_cal.current_month_start_index return mini_cal.nth(index) } move_weeks(start_day_num) { // move the calandar back to the date it started at // move active range forward, out the way mini_cal.month_forward() // we must click outside the active range, otherwise, we just select a single day trigger('mousedown mouseup', mini_cal.last) // now click the date we want, in the mini map mini_cal.navigate_to_day_num(start_day_num) trigger('mousedown mouseup', mini_cal.cell_from_day_num(start_day_num)) } write_custom_button_label(num_weeks=null) { toolbar.custom_view 
.find(`.${BUTTON_CONTENT}`) .text(`${num_weeks || big_cal.num_weeks} weeks`) } alter_weeks(delta) { if (toolbar.is_custom_view_active) { return this.display_weeks(big_cal.num_weeks + delta) } let that = this this.load_num_weeks().then(function(num_weeks) { that.display_weeks(num_weeks + delta) }) } display_weeks(weeks_left) { if (weeks_left < 2) { weeks_left = 2 } let target_start_day_num = big_cal.first_day_num // move to custom view, click doesn't work here trigger('mousedown mouseup', toolbar.custom_view) // ensure start date in visible in mini cal mini_cal.navigate_to_day_num(target_start_day_num) // do a double manoeuvre: click next month during a click drag over the mini calendar. // this is how we reach more than one month trigger('mousedown', this.get_start_cell()) let days_remaining = this.allocate_weeks(weeks_left) trigger('mousemove mouseup', this.get_end_cell(days_remaining)) this.write_custom_button_label() this.move_weeks(target_start_day_num) // preserve number of weeks for next page (re)load this.save_num_weeks() toolbar.custom_view.addClass(BUTTON_CHECKED) $('.gcal-unlim-weeks-adjust-weeks').removeClass(BUTTON_CHECKED) } } let demo = false let mini_cal = new MiniCal() let big_cal = new BigCal() let toolbar = new Toolbar() let unlimited_weeks = new UnlimitedWeeks() $(document) .on("custom_view_buttons_visible", function() { toolbar.inject_buttons() if (demo === true) { console.log('demo') setTimeout(unlimited_weeks.add_week, 1000) setTimeout(unlimited_weeks.add_week, 1500) setTimeout(unlimited_weeks.remove_week, 3000) } }) $(document).ready( function() { // triggers custom_view_buttons_visible event toolbar.poll_custom_button_visibility() })
load_num_weeks's promise always returns an int
src/inject/inject.js
load_num_weeks's promise always returns an int
<ide><path>src/inject/inject.js <ide> load_num_weeks() { <ide> // returns a promise <ide> if (!this.can_persist) { <del> return new Promise((resolve) => resolve(null)) <add> return new Promise((resolve) => resolve(big_cal.num_weeks)) <ide> } <ide> return new Promise(function(resolve){ <ide> chrome.storage.sync.get('num_weeks', function(data) {
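The fix above makes load_num_weeks resolve with the calendar's visible week count instead of null when chrome.storage is unavailable, so callers can always do arithmetic on the result. A rough Java analogue of that "always resolve with a usable default" pattern is sketched below using CompletableFuture; the class and field names are placeholders, not part of the extension.

    import java.util.concurrent.CompletableFuture;

    // Sketch: a loader that always completes with a usable number, never null.
    public class NumWeeksLoader {
        private final boolean canPersist;   // stand-in for chrome.storage being available
        private final int visibleNumWeeks;  // stand-in for big_cal.num_weeks

        public NumWeeksLoader(boolean canPersist, int visibleNumWeeks) {
            this.canPersist = canPersist;
            this.visibleNumWeeks = visibleNumWeeks;
        }

        public CompletableFuture<Integer> loadNumWeeks() {
            if (!canPersist) {
                // fall back to what is on screen rather than completing with null
                return CompletableFuture.completedFuture(visibleNumWeeks);
            }
            // a real storage lookup would go here; bad or missing data also falls back
            return CompletableFuture.completedFuture(visibleNumWeeks);
        }

        public static void main(String[] args) {
            new NumWeeksLoader(false, 4).loadNumWeeks()
                    .thenAccept(n -> System.out.println(n + 1)); // arithmetic is always safe: 5
        }
    }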
Java
mit
01b52b1cf196a12fafb70c21cfeccb35c90265c2
0
algolia/algoliasearch-client-android,algolia/algoliasearch-client-android,algoliareadmebot/algoliasearch-client-android,algoliareadmebot/algoliasearch-client-android,algolia/algoliasearch-client-android
/* * Copyright (c) 2012-2016 Algolia * http://www.algolia.com/ * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.algolia.search.saas; import android.os.AsyncTask; import android.util.Log; import com.algolia.search.saas.listeners.SearchListener; import com.algolia.search.sdk.LocalIndex; import com.algolia.search.sdk.SearchResults; import org.json.JSONObject; import java.io.File; import java.io.FileOutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.UUID; /** * An online index that can also be mirrored locally. * * @note Requires Algolia's SDK. */ public class MirroredIndex extends Index { private LocalIndex localIndex; private boolean mirrored; private MirrorSettings mirrorSettings = new MirrorSettings(); private long delayBetweenSyncs = 1000 * 60 * 60; // 1 hour private boolean syncing; private File tmpDir; private File settingsFile; private List<File> objectFiles; protected MirroredIndex(OfflineAPIClient client, String indexName) { super(client, indexName); } // ---------------------------------------------------------------------- // Accessors // ---------------------------------------------------------------------- public OfflineAPIClient getClient() { return (OfflineAPIClient)super.getClient(); } public boolean isMirrored() { return mirrored; } public void setMirrored(boolean mirrored) { if (!this.mirrored && mirrored) { loadMirroSettings(); } this.mirrored = mirrored; } public void addDataSelectionQuery(Query query) { Query tweakedQuery = new Query(query); final List<String> emptyList = new ArrayList<String>(); tweakedQuery.setAttributesToHighlight(emptyList); tweakedQuery.setAttributesToSnippet(emptyList); tweakedQuery.getRankingInfo(false); mirrorSettings.addQuery(tweakedQuery.getQueryString()); mirrorSettings.setQueriesModificationDate(new Date()); saveMirrorSettings(); } public String[] getDataSelectionQueries() { return mirrorSettings.getQueries(); } public long getDelayBetweenSyncs() { return delayBetweenSyncs; } public void setDelayBetweenSyncs(long delayBetweenSyncs) { this.delayBetweenSyncs = delayBetweenSyncs; } /** * Lazy instantiate the local index. 
*/ protected void ensureLocalIndex() { if (localIndex == null) { localIndex = new LocalIndex(getClient().getRootDataDir().getAbsolutePath(), getClient().getApplicationID(), getIndexName()); } } private File getTempDir() { // TODO: Use better value return getClient().getRootDataDir(); } private File getDataDir() { return new File(new File(getClient().getRootDataDir(), getClient().getApplicationID()), getIndexName()); } private File getSettingsFile() { return new File(getDataDir(), "mirror.json"); } // ---------------------------------------------------------------------- // Settings // ---------------------------------------------------------------------- private void saveMirrorSettings() { mirrorSettings.save(getSettingsFile()); } private void loadMirroSettings() { File settingsFile = getSettingsFile(); if (settingsFile.exists()) { mirrorSettings.load(settingsFile); } } // ---------------------------------------------------------------------- // Sync // ---------------------------------------------------------------------- public void sync() { synchronized (this) { if (syncing) return; syncing = true; } getClient().buildExecutorService.submit(new Runnable() { @Override public void run() { _sync(); } }); } public void syncIfNeeded() { long currentDate = System.currentTimeMillis(); if (currentDate - mirrorSettings.getLastSyncDate().getTime() > delayBetweenSyncs || mirrorSettings.getQueriesModificationDate().compareTo(mirrorSettings.getLastSyncDate()) > 0) { sync(); } } /** * Refresh the local mirror. * @warning Should be called from a background thread. */ private void _sync() { if (!mirrored) throw new IllegalArgumentException("Mirroring not activated on this index"); try { // Create temporary directory. tmpDir = new File(getTempDir(), UUID.randomUUID().toString()); tmpDir.mkdirs(); // TODO: We are doing everything sequentially so far. // Fetch settings. { JSONObject settingsJSON = this.getSettings(); settingsFile = new File(tmpDir, "settings.json"); String data = settingsJSON.toString(); Writer writer = new OutputStreamWriter(new FileOutputStream(settingsFile), "UTF-8"); writer.write(data); writer.close(); } // Perform data selection queries. objectFiles = new ArrayList<>(); final String[] queries = mirrorSettings.getQueries(); for (int i = 0; i < queries.length; ++i) { String query = queries[i]; JSONObject objectsJSON = getClient().getRequest("/1/indexes/" + getEncodedIndexName() + "?" + query, true); File file = new File(tmpDir, String.format("%d.json", i)); objectFiles.add(file); String data = objectsJSON.toString(); Writer writer = new OutputStreamWriter(new FileOutputStream(file), "UTF-8"); writer.write(data); writer.close(); } // Build the index. ensureLocalIndex(); String[] objectFilePaths = new String[objectFiles.size()]; for (int i = 0; i < objectFiles.size(); ++i) objectFilePaths[i] = objectFiles.get(i).getAbsolutePath(); int status = localIndex.build(settingsFile.getAbsolutePath(), objectFilePaths); if (status != 200) { throw new AlgoliaException("Build index failed", status); } // Remember the last sync date. mirrorSettings.setLastSyncDate(new Date()); saveMirrorSettings(); } catch (Exception e) { Log.e(this.getClass().getName(), "Sync failed", e); } finally { // Clean up. if (tmpDir != null) { FileUtils.deleteRecursive(tmpDir); tmpDir = null; } settingsFile = null; objectFiles = null; // Mark sync as finished. 
synchronized (this) { syncing = false; } } } // ---------------------------------------------------------------------- // Search // ---------------------------------------------------------------------- public void searchASync(Query query, SearchListener listener) { new SearchMirrorTask().execute(new TaskParams.Search(listener, query)); } private class SearchMirrorTask extends AsyncTask<TaskParams.Search, Void, TaskParams.Search> { private SearchListener listener; private Query query; @Override protected TaskParams.Search doInBackground(TaskParams.Search... params) { TaskParams.Search p = params[0]; listener = p.listener; query = p.query; // First search the online API. try { p.content = search(p.query); } catch (AlgoliaException e) { // Fallback to the offline mirror if available. if (mirrored) { try { p.content = _searchMirror(query.getQueryString()); } catch (AlgoliaException e2) { p.error = e2; } } else { p.error = e; } } return p; } @Override protected void onPostExecute(TaskParams.Search p) { p.sendResult(MirroredIndex.this); } } private JSONObject _searchMirror(String query) throws AlgoliaException { if (!mirrored) throw new IllegalArgumentException("Mirroring not activated on this index"); try { ensureLocalIndex(); SearchResults searchResults = localIndex.search(query); if (searchResults.statusCode == 200) { String jsonString = new String(searchResults.data, "UTF-8"); JSONObject json = new JSONObject(jsonString); return json; } else { throw new AlgoliaException(searchResults.errorMessage, searchResults.statusCode); } } catch (Exception e) { throw new AlgoliaException("Search failed", e); } } }
algoliasearch/src/offline/java/com/algolia/search/saas/MirroredIndex.java
/* * Copyright (c) 2012-2016 Algolia * http://www.algolia.com/ * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.algolia.search.saas; import android.os.AsyncTask; import android.util.Log; import com.algolia.search.saas.listeners.SearchListener; import com.algolia.search.sdk.LocalIndex; import com.algolia.search.sdk.SearchResults; import org.json.JSONObject; import java.io.File; import java.io.FileOutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.UUID; /** * An online index that can also be mirrored locally. * * @note Requires Algolia's SDK. */ public class MirroredIndex extends Index { private LocalIndex localIndex; private boolean mirrored; private MirrorSettings mirrorSettings = new MirrorSettings(); private long delayBetweenSyncs = 1000 * 60 * 60; // 1 hour private boolean syncing; private File tmpDir; private File settingsFile; private List<File> objectFiles; protected MirroredIndex(OfflineAPIClient client, String indexName) { super(client, indexName); } // ---------------------------------------------------------------------- // Accessors // ---------------------------------------------------------------------- public OfflineAPIClient getClient() { return (OfflineAPIClient)super.getClient(); } public boolean isMirrored() { return mirrored; } public void setMirrored(boolean mirrored) { if (!this.mirrored && mirrored) { loadMirroSettings(); } this.mirrored = mirrored; } public void addDataSelectionQuery(Query query) { Query tweakedQuery = new Query(query); final List<String> emptyList = new ArrayList<String>(); tweakedQuery.setAttributesToHighlight(emptyList); tweakedQuery.setAttributesToSnippet(emptyList); tweakedQuery.getRankingInfo(false); mirrorSettings.addQuery(tweakedQuery.getQueryString()); mirrorSettings.setQueriesModificationDate(new Date()); saveMirrorSettings(); } public String[] getDataSelectionQueries() { return mirrorSettings.getQueries(); } public long getDelayBetweenSyncs() { return delayBetweenSyncs; } public void setDelayBetweenSyncs(long delayBetweenSyncs) { this.delayBetweenSyncs = delayBetweenSyncs; } /** * Lazy instantiate the local index. 
*/ protected void ensureLocalIndex() { if (localIndex == null) { localIndex = new LocalIndex(getClient().getRootDataDir().getAbsolutePath(), getClient().getApplicationID(), getIndexName()); } } private File getTempDir() { // TODO: Use better value return getClient().getRootDataDir(); } private File getDataDir() { return new File(new File(getClient().getRootDataDir(), getClient().getApplicationID()), getIndexName()); } private File getSettingsFile() { return new File(getDataDir(), "mirror.json"); } // ---------------------------------------------------------------------- // Settings // ---------------------------------------------------------------------- private void saveMirrorSettings() { mirrorSettings.save(getSettingsFile()); } private void loadMirroSettings() { File settingsFile = getSettingsFile(); if (settingsFile.exists()) { mirrorSettings.load(settingsFile); } } // ---------------------------------------------------------------------- // Sync // ---------------------------------------------------------------------- public void sync() { synchronized (this) { if (syncing) return; syncing = true; } getClient().buildExecutorService.submit(new Runnable() { @Override public void run() { _sync(); } }); } public void syncIfNeeded() { long currentDate = System.currentTimeMillis(); if (currentDate - mirrorSettings.getLastSyncDate().getTime() > delayBetweenSyncs || mirrorSettings.getQueriesModificationDate().compareTo(mirrorSettings.getLastSyncDate()) > 0) { sync(); } } /** * Refresh the local mirror. * @warning Should be called from a background thread. */ private void _sync() { if (!mirrored) throw new IllegalArgumentException("Mirroring not activated on this index"); try { // Create temporary directory. tmpDir = new File(getTempDir(), UUID.randomUUID().toString()); tmpDir.mkdirs(); // TODO: We are doing everything sequentially so far. // Fetch settings. { JSONObject settingsJSON = this.getSettings(); settingsFile = new File(tmpDir, "settings.json"); String data = settingsJSON.toString(); Writer writer = new OutputStreamWriter(new FileOutputStream(settingsFile), "UTF-8"); writer.write(data); writer.close(); } // Perform data selection queries. objectFiles = new ArrayList<>(); final String[] queries = mirrorSettings.getQueries(); for (int i = 0; i < queries.length; ++i) { String query = queries[i]; JSONObject objectsJSON = getClient().getRequest("/1/indexes/" + getEncodedIndexName() + "?" + query, true); File file = new File(tmpDir, String.format("%d.json", i)); objectFiles.add(file); String data = objectsJSON.toString(); Writer writer = new OutputStreamWriter(new FileOutputStream(file), "UTF-8"); writer.write(data); writer.close(); } // Build the index. ensureLocalIndex(); String[] objectFilePaths = new String[objectFiles.size()]; for (int i = 0; i < objectFiles.size(); ++i) objectFilePaths[i] = objectFiles.get(i).getAbsolutePath(); int status = localIndex.build(settingsFile.getAbsolutePath(), objectFilePaths); if (status != 200) { throw new AlgoliaException("Build index failed", status); } // Remember the last sync date. mirrorSettings.setLastSyncDate(new Date()); saveMirrorSettings(); } catch (Exception e) { Log.e(this.getClass().getName(), "Sync failed", e); } finally { // Clean up. if (tmpDir != null) { FileUtils.deleteRecursive(tmpDir); tmpDir = null; } settingsFile = null; objectFiles = null; // Mark sync as finished. 
synchronized (this) { syncing = false; } } } // ---------------------------------------------------------------------- // Search // ---------------------------------------------------------------------- public void searchASync(Query query, SearchListener listener) { // TODO: Should use offline only as a fallback mechanism. new SearchMirrorTask().execute(new TaskParams.Search(listener, query)); } private class SearchMirrorTask extends AsyncTask<TaskParams.Search, Void, JSONObject> { private SearchListener listener; private Query query; @Override protected JSONObject doInBackground(TaskParams.Search... params) { TaskParams.Search p = params[0]; listener = p.listener; query = p.query; return _searchMirror(query.getQueryString()); } @Override protected void onPostExecute(JSONObject jsonResult) { if (jsonResult != null) { listener.searchResult(MirroredIndex.this, query, jsonResult); } else { listener.searchError(MirroredIndex.this, query, new AlgoliaException("TODO")); } } } private JSONObject _searchMirror(String query) { if (!mirrored) throw new IllegalArgumentException("Mirroring not activated on this index"); try { ensureLocalIndex(); SearchResults searchResults = localIndex.search(query); if (searchResults.statusCode == 200) { String jsonString = new String(searchResults.data, "UTF-8"); JSONObject json = new JSONObject(jsonString); return json; } else { // TODO: Handle error } } catch (Exception e) { Log.e(this.getClass().getName(), "Sync failed", e); } return null; } }
Search first online, then offline if error and mirror available
algoliasearch/src/offline/java/com/algolia/search/saas/MirroredIndex.java
Search first online, then offline if error and mirror available
<ide><path>lgoliasearch/src/offline/java/com/algolia/search/saas/MirroredIndex.java <ide> <ide> public void searchASync(Query query, SearchListener listener) <ide> { <del> // TODO: Should use offline only as a fallback mechanism. <ide> new SearchMirrorTask().execute(new TaskParams.Search(listener, query)); <ide> } <ide> <del> private class SearchMirrorTask extends AsyncTask<TaskParams.Search, Void, JSONObject> <add> private class SearchMirrorTask extends AsyncTask<TaskParams.Search, Void, TaskParams.Search> <ide> { <ide> private SearchListener listener; <ide> private Query query; <ide> <ide> @Override <del> protected JSONObject doInBackground(TaskParams.Search... params) <add> protected TaskParams.Search doInBackground(TaskParams.Search... params) <ide> { <ide> TaskParams.Search p = params[0]; <ide> listener = p.listener; <ide> query = p.query; <del> return _searchMirror(query.getQueryString()); <add> // First search the online API. <add> try { <add> p.content = search(p.query); <add> } <add> catch (AlgoliaException e) { <add> // Fallback to the offline mirror if available. <add> if (mirrored) { <add> try { <add> p.content = _searchMirror(query.getQueryString()); <add> } <add> catch (AlgoliaException e2) { <add> p.error = e2; <add> } <add> } <add> else { <add> p.error = e; <add> } <add> } <add> return p; <ide> } <ide> <ide> @Override <del> protected void onPostExecute(JSONObject jsonResult) <add> protected void onPostExecute(TaskParams.Search p) <ide> { <del> if (jsonResult != null) { <del> listener.searchResult(MirroredIndex.this, query, jsonResult); <del> } <del> else { <del> listener.searchError(MirroredIndex.this, query, new AlgoliaException("TODO")); <del> } <del> } <del> } <del> <del> private JSONObject _searchMirror(String query) <add> p.sendResult(MirroredIndex.this); <add> } <add> } <add> <add> private JSONObject _searchMirror(String query) throws AlgoliaException <ide> { <ide> if (!mirrored) <ide> throw new IllegalArgumentException("Mirroring not activated on this index"); <ide> return json; <ide> } <ide> else { <del> // TODO: Handle error <add> throw new AlgoliaException(searchResults.errorMessage, searchResults.statusCode); <ide> } <ide> } <ide> catch (Exception e) { <del> Log.e(this.getClass().getName(), "Sync failed", e); <del> } <del> return null; <add> throw new AlgoliaException("Search failed", e); <add> } <ide> } <ide> }
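Per the commit message, SearchMirrorTask now queries the online API first and only consults the local mirror when the request fails and a mirror exists, surfacing the original error otherwise. The fragment below is a stripped-down sketch of that decision flow in plain Java, without Android's AsyncTask; the Search interface and method names are placeholders, not the Algolia client API.

    // Sketch of the online-first / offline-fallback decision, assuming both search
    // paths throw an exception on failure.
    public class FallbackSearch {

        interface Search { String run(String query) throws Exception; }

        static String search(String query, boolean mirrored, Search online, Search offline) throws Exception {
            try {
                return online.run(query);          // always try the live API first
            } catch (Exception onlineError) {
                if (!mirrored) throw onlineError;  // no mirror: surface the original error
                return offline.run(query);         // otherwise answer from the local copy
            }
        }

        public static void main(String[] args) throws Exception {
            String result = search("hotels", true,
                    q -> { throw new Exception("network down"); },
                    q -> "offline hits for " + q);
            System.out.println(result); // offline hits for hotels
        }
    }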
Java
bsd-2-clause
ed516434d161be34f4a2d33005c42193e5c735eb
0
linkedin/LiTr,linkedin/LiTr,linkedin/LiTr
/* * Copyright 2019 LinkedIn Corporation * All Rights Reserved. * * Licensed under the BSD 2-Clause License (the "License"). See License in the project root for * license information. */ package com.linkedin.android.litr.transcoder; import android.media.MediaExtractor; import android.media.MediaFormat; import android.util.Log; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.RestrictTo; import com.linkedin.android.litr.codec.Decoder; import com.linkedin.android.litr.codec.Encoder; import com.linkedin.android.litr.exception.TrackTranscoderException; import com.linkedin.android.litr.io.MediaSource; import com.linkedin.android.litr.io.MediaTarget; import com.linkedin.android.litr.render.PassthroughSoftwareRenderer; import com.linkedin.android.litr.render.Renderer; @RestrictTo(RestrictTo.Scope.LIBRARY) public class TrackTranscoderFactory { private static final String TAG = TrackTranscoderFactory.class.getSimpleName(); /** * Create a proper transcoder for a given source track and target media format. * * @param sourceTrack source track id * @param mediaSource {@link MediaExtractor} for reading data from the source * @param mediaTarget {@link MediaTarget} for writing data to the target * @param targetFormat {@link MediaFormat} with target video track parameters, null if writing "as is" * @return implementation of {@link TrackTranscoder} for a given track */ @NonNull public TrackTranscoder create(int sourceTrack, int targetTrack, @NonNull MediaSource mediaSource, @Nullable Decoder decoder, @Nullable Renderer renderer, @Nullable Encoder encoder, @NonNull MediaTarget mediaTarget, @Nullable MediaFormat targetFormat) throws TrackTranscoderException { if (targetFormat == null) { return new PassthroughTranscoder(mediaSource, sourceTrack, mediaTarget, targetTrack); } String trackMimeType = targetFormat.getString(MediaFormat.KEY_MIME); if (trackMimeType == null) { throw new TrackTranscoderException(TrackTranscoderException.Error.SOURCE_TRACK_MIME_TYPE_NOT_FOUND, targetFormat, null, null); } if (trackMimeType.startsWith("video") || trackMimeType.startsWith("audio")) { if (decoder == null) { throw new TrackTranscoderException(TrackTranscoderException.Error.DECODER_NOT_PROVIDED, targetFormat, null, null); } else if (encoder == null) { throw new TrackTranscoderException(TrackTranscoderException.Error.ENCODER_NOT_PROVIDED, targetFormat, null, null); } } if (trackMimeType.startsWith("video")) { if (renderer == null) { throw new TrackTranscoderException(TrackTranscoderException.Error.RENDERER_NOT_PROVIDED, targetFormat, null, null); } return new VideoTrackTranscoder(mediaSource, sourceTrack, mediaTarget, targetTrack, targetFormat, renderer, decoder, encoder); } else if (trackMimeType.startsWith("audio")) { Renderer audioRenderer = renderer == null ? new PassthroughSoftwareRenderer(encoder) : renderer; return new AudioTrackTranscoder(mediaSource, sourceTrack, mediaTarget, targetTrack, targetFormat, audioRenderer, decoder, encoder); } else { Log.i(TAG, "Unsupported track mime type: " + trackMimeType + ", will use passthrough transcoder"); return new PassthroughTranscoder(mediaSource, sourceTrack, mediaTarget, targetTrack); } } }
litr/src/main/java/com/linkedin/android/litr/transcoder/TrackTranscoderFactory.java
/* * Copyright 2019 LinkedIn Corporation * All Rights Reserved. * * Licensed under the BSD 2-Clause License (the "License"). See License in the project root for * license information. */ package com.linkedin.android.litr.transcoder; import android.media.MediaExtractor; import android.media.MediaFormat; import android.util.Log; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.RestrictTo; import com.linkedin.android.litr.codec.Decoder; import com.linkedin.android.litr.codec.Encoder; import com.linkedin.android.litr.exception.TrackTranscoderException; import com.linkedin.android.litr.io.MediaSource; import com.linkedin.android.litr.io.MediaTarget; import com.linkedin.android.litr.render.PassthroughSoftwareRenderer; import com.linkedin.android.litr.render.Renderer; @RestrictTo(RestrictTo.Scope.LIBRARY) public class TrackTranscoderFactory { private static final String TAG = TrackTranscoderFactory.class.getSimpleName(); /** * Create a proper transcoder for a given source track and target media format. * * @param sourceTrack source track id * @param mediaSource {@link MediaExtractor} for reading data from the source * @param mediaTarget {@link MediaTarget} for writing data to the target * @param targetFormat {@link MediaFormat} with target video track parameters, null if writing "as is" * @return implementation of {@link TrackTranscoder} for a given track */ @NonNull public TrackTranscoder create(int sourceTrack, int targetTrack, @NonNull MediaSource mediaSource, @Nullable Decoder decoder, @Nullable Renderer renderer, @Nullable Encoder encoder, @NonNull MediaTarget mediaTarget, @Nullable MediaFormat targetFormat) throws TrackTranscoderException { if (targetFormat == null) { return new PassthroughTranscoder(mediaSource, sourceTrack, mediaTarget, targetTrack); } String trackMimeType = targetFormat.getString(MediaFormat.KEY_MIME); if (trackMimeType == null) { throw new TrackTranscoderException(TrackTranscoderException.Error.SOURCE_TRACK_MIME_TYPE_NOT_FOUND, targetFormat, null, null); } if (trackMimeType.startsWith("video") || trackMimeType.startsWith("audio")) { if (decoder == null) { throw new TrackTranscoderException(TrackTranscoderException.Error.DECODER_NOT_PROVIDED, targetFormat, null, null); } else if (encoder == null) { throw new TrackTranscoderException(TrackTranscoderException.Error.ENCODER_NOT_PROVIDED, targetFormat, null, null); } } // TODO move into statement above when audio renderer is implemented if (trackMimeType.startsWith("video") && renderer == null) { throw new TrackTranscoderException(TrackTranscoderException.Error.RENDERER_NOT_PROVIDED, targetFormat, null, null); } if (trackMimeType.startsWith("video")) { return new VideoTrackTranscoder(mediaSource, sourceTrack, mediaTarget, targetTrack, targetFormat, renderer, decoder, encoder); } else if (trackMimeType.startsWith("audio")) { Renderer audioRenderer = renderer == null ? new PassthroughSoftwareRenderer(encoder) : renderer; return new AudioTrackTranscoder(mediaSource, sourceTrack, mediaTarget, targetTrack, targetFormat, audioRenderer, decoder, encoder); } else { Log.i(TAG, "Unsupported track mime type: " + trackMimeType + ", will use passthrough transcoder"); return new PassthroughTranscoder(mediaSource, sourceTrack, mediaTarget, targetTrack); } } }
Move video renderer null check into video transcoder creation block (#89)
litr/src/main/java/com/linkedin/android/litr/transcoder/TrackTranscoderFactory.java
Move video renderer null check into video transcoder creation block (#89)
<ide><path>litr/src/main/java/com/linkedin/android/litr/transcoder/TrackTranscoderFactory.java <ide> null); <ide> } <ide> } <del> // TODO move into statement above when audio renderer is implemented <del> if (trackMimeType.startsWith("video") && renderer == null) { <del> throw new TrackTranscoderException(TrackTranscoderException.Error.RENDERER_NOT_PROVIDED, <del> targetFormat, <del> null, <del> null); <del> } <ide> <ide> if (trackMimeType.startsWith("video")) { <add> if (renderer == null) { <add> throw new TrackTranscoderException(TrackTranscoderException.Error.RENDERER_NOT_PROVIDED, <add> targetFormat, <add> null, <add> null); <add> } <ide> return new VideoTrackTranscoder(mediaSource, <ide> sourceTrack, <ide> mediaTarget,
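The diff above moves the renderer null check inside the video branch, so audio tracks can now be created without a renderer and fall back to a passthrough one. A small hedged sketch of that "validate only what this branch needs" factory shape follows; the types are simplified placeholders, not the LiTr API.

    // Sketch: per-branch validation in a track transcoder factory.
    public class TranscoderFactorySketch {

        static String create(String mimeType, Object renderer) {
            if (mimeType.startsWith("video")) {
                if (renderer == null) {
                    // only video strictly needs a renderer, so the check lives here
                    throw new IllegalArgumentException("renderer required for video tracks");
                }
                return "video transcoder";
            } else if (mimeType.startsWith("audio")) {
                // audio can substitute a passthrough renderer, so null is allowed
                return renderer == null ? "audio transcoder (passthrough renderer)" : "audio transcoder";
            }
            return "passthrough transcoder";
        }

        public static void main(String[] args) {
            System.out.println(create("audio/mp4", null));   // no exception for audio
            System.out.println(create("video/avc", new Object()));
        }
    }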
Java
apache-2.0
c19a6504844a7df201a5b387941a4535fa4638db
0
WeRockStar/iosched,WeRockStar/iosched,jarekankowski/iosched,jarekankowski/iosched,WeRockStar/iosched,jarekankowski/iosched
/* * Copyright 2017 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.samples.apps.iosched.server.userdata; /** * API keys. */ public class Ids { public static final String ANDROID_CLIENT_ID = "596109260910-nh4ecebl3fibbhrk5lld3lprk9id3ecq" + ".apps.googleusercontent.com"; public static final String IOS_CLIENT_ID = "596109260910-n1vrfjs8d7105jh5j7qf42ph32sltjp0" + ".apps.googleusercontent.com"; public static final String WEB_CLIENT_ID = "755839215930-ctkg839m67rtqmgm55c6eg1j7cvu5mmf" + ".apps.googleusercontent.com"; public static final String SERVICE_ACCOUNT_CLIENT_ID = "104433463769697768142"; public static final String SERVICE_ACCOUNT_EMAIL = "[email protected]"; public static final String ANDROID_AUDIENCE = ANDROID_CLIENT_ID; }
server/src/main/java/com/google/samples/apps/iosched/server/userdata/Ids.java
/* * Copyright 2017 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.samples.apps.iosched.server.userdata; /** * API keys. */ public class Ids { public static final String ANDROID_CLIENT_ID = "237695054204-m87hbqe20bqpib715p3hiddpjjfih2l9" + ".apps.googleusercontent.com"; public static final String IOS_CLIENT_ID = "596109260910-n1vrfjs8d7105jh5j7qf42ph32sltjp0" + ".apps.googleusercontent.com"; public static final String WEB_CLIENT_ID = "755839215930-ctkg839m67rtqmgm55c6eg1j7cvu5mmf" + ".apps.googleusercontent.com"; public static final String SERVICE_ACCOUNT_CLIENT_ID = "104433463769697768142"; public static final String SERVICE_ACCOUNT_EMAIL = "[email protected]"; public static final String ANDROID_AUDIENCE = ANDROID_CLIENT_ID; }
Updated Android client id for server Change-Id: I8c5331d926f982a4f3340102718645ec19f13ab9
server/src/main/java/com/google/samples/apps/iosched/server/userdata/Ids.java
Updated Android client id for server
<ide><path>server/src/main/java/com/google/samples/apps/iosched/server/userdata/Ids.java <ide> * API keys. <ide> */ <ide> public class Ids { <del> public static final String ANDROID_CLIENT_ID = "237695054204-m87hbqe20bqpib715p3hiddpjjfih2l9" <del> + ".apps.googleusercontent.com"; <add> public static final String ANDROID_CLIENT_ID = "596109260910-nh4ecebl3fibbhrk5lld3lprk9id3ecq" + <add> ".apps.googleusercontent.com"; <ide> public static final String IOS_CLIENT_ID = "596109260910-n1vrfjs8d7105jh5j7qf42ph32sltjp0" <ide> + ".apps.googleusercontent.com"; <ide> public static final String WEB_CLIENT_ID = "755839215930-ctkg839m67rtqmgm55c6eg1j7cvu5mmf"
Java
apache-2.0
e80f4f96f2eec05e571778bc3479c151533bbd3f
0
yammer/azure-table
package com.yammer.collections.guava.azure.backup.tool; import com.google.common.base.Optional; import org.apache.commons.cli.ParseException; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import java.io.IOException; import java.net.URISyntaxException; import java.security.InvalidKeyException; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; @RunWith(MockitoJUnitRunner.class) public class BackupCLIParserTest { private static final String CONFIG_FILE_PATH = BackupToolIntegrationTest.class.getResource("testBackupAccountConfiguration.yml").getPath(); private static final String[] DO_BACKUP_COMMAND_LINE = {"-cf", CONFIG_FILE_PATH, "-b"}; private static final String[] DELETE_BAD_BACKUPS_COMMAND_LINE = {"-cf", CONFIG_FILE_PATH, "-db"}; private static final String[] DELETE_BACKUPS_COMMAND_LINE = {"-cf", CONFIG_FILE_PATH, "-d", "0"}; private static final String[] LIST_BACKUPS_COMMAND_LINE = {"-cf", CONFIG_FILE_PATH, "-l", "0"}; private static final String[] LIST_ALL_BACKUPS_COMMAND_LINE = {"-cf", CONFIG_FILE_PATH, "-la"}; private static final String[] RESTORE_COMMAND_LINE = {"-cf", CONFIG_FILE_PATH, "-r", "" + Long.MAX_VALUE}; private BackupCLIParser backupCLIParser; @Mock private BackupServiceFactory backupServiceFactoryMock; @Before public void setUp() { backupCLIParser = new BackupCLIParser(backupServiceFactoryMock, System.out, System.err); } @Test public void backupCommandLineOptionsParsedCorrectly() throws URISyntaxException, InvalidKeyException, ParseException, IOException { Optional<BackupCommand> createdCommand = backupCLIParser.parse(DO_BACKUP_COMMAND_LINE); assertThat(createdCommand.get(), is(instanceOf(DoBackupCommand.class))); } @Test public void deleteBadBackupsCommandLineOptionsParsedCorrectly() throws URISyntaxException, InvalidKeyException, ParseException, IOException { Optional<BackupCommand> createdCommand = backupCLIParser.parse(DELETE_BAD_BACKUPS_COMMAND_LINE); assertThat(createdCommand.get(), is(instanceOf(DeleteBadBackupsCommand.class))); } @Test public void deleteBackupCommandLineOptionsParsedCorrectly() throws URISyntaxException, InvalidKeyException, ParseException, IOException { Optional<BackupCommand> createdCommand = backupCLIParser.parse(DELETE_BACKUPS_COMMAND_LINE); assertThat(createdCommand.get(), is(instanceOf(DeleteBackupsCommand.class))); } @Test public void listBackupCommandLineOptionsParsedCorrectly() throws URISyntaxException, InvalidKeyException, ParseException, IOException { Optional<BackupCommand> createdCommand = backupCLIParser.parse(LIST_BACKUPS_COMMAND_LINE); assertThat(createdCommand.get(), is(instanceOf(ListBackupsCommand.class))); } @Test public void listAllBackupCommandLineOptionsParsedCorrectly() throws URISyntaxException, InvalidKeyException, ParseException, IOException { Optional<BackupCommand> createdCommand = backupCLIParser.parse(LIST_ALL_BACKUPS_COMMAND_LINE); assertThat(createdCommand.get(), is(instanceOf(ListBackupsCommand.class))); } @Test public void restoreBackupCommandLineOptionsParsedCorrectly() throws URISyntaxException, InvalidKeyException, ParseException, IOException { Optional<BackupCommand> createdCommand = backupCLIParser.parse(RESTORE_COMMAND_LINE); assertThat(createdCommand.get(), is(instanceOf(RestoreFromBackupCommand.class))); } }
azure-table-backup/src/test/java/com/yammer/collections/guava/azure/backup/tool/BackupCLIParserTest.java
package com.yammer.collections.guava.azure.backup.tool; import com.google.common.base.Optional; import org.apache.commons.cli.ParseException; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import java.io.IOException; import java.net.URISyntaxException; import java.security.InvalidKeyException; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertThat; // TODO improve this tests resolution, to verify correct parsing @RunWith(MockitoJUnitRunner.class) public class BackupCLIParserTest { private static final String CONFIG_FILE_PATH = BackupToolIntegrationTest.class.getResource("testBackupAccountConfiguration.yml").getPath(); private static final String[] DO_BACKUP_COMMAND_LINE = {"-cf", CONFIG_FILE_PATH, "-b"}; private static final String[] DELETE_BAD_BACKUPS_COMMAND_LINE = {"-cf", CONFIG_FILE_PATH, "-db"}; private static final String[] DELETE_BACKUPS_COMMAND_LINE = {"-cf", CONFIG_FILE_PATH, "-d", "0"}; private static final String[] LIST_BACKUPS_COMMAND_LINE = {"-cf", CONFIG_FILE_PATH, "-l", "0"}; private static final String[] LIST_ALL_BACKUPS_COMMAND_LINE = {"-cf", CONFIG_FILE_PATH, "-la"}; private static final String[] RESTORE_COMMAND_LINE = {"-cf", CONFIG_FILE_PATH, "-r", "" + Long.MAX_VALUE}; private BackupCLIParser backupCLIParser; @Mock private BackupServiceFactory backupServiceFactoryMock; @Before public void setUp() { backupCLIParser = new BackupCLIParser(backupServiceFactoryMock, System.out, System.err); } @Test public void backupCommandLineOptionsParsedCorrectly() throws URISyntaxException, InvalidKeyException, ParseException, IOException { Optional<BackupCommand> createdCommand = backupCLIParser.parse(DO_BACKUP_COMMAND_LINE); assertThat(createdCommand.get(), is(instanceOf(DoBackupCommand.class))); } @Test public void deleteBadBackupsCommandLineOptionsParsedCorrectly() throws URISyntaxException, InvalidKeyException, ParseException, IOException { Optional<BackupCommand> createdCommand = backupCLIParser.parse(DELETE_BAD_BACKUPS_COMMAND_LINE); assertThat(createdCommand.get(), is(instanceOf(DeleteBadBackupsCommand.class))); } @Test public void deleteBackupCommandLineOptionsParsedCorrectly() throws URISyntaxException, InvalidKeyException, ParseException, IOException { Optional<BackupCommand> createdCommand = backupCLIParser.parse(DELETE_BACKUPS_COMMAND_LINE); assertThat(createdCommand.get(), is(instanceOf(DeleteBackupsCommand.class))); } @Test public void listBackupCommandLineOptionsParsedCorrectly() throws URISyntaxException, InvalidKeyException, ParseException, IOException { Optional<BackupCommand> createdCommand = backupCLIParser.parse(LIST_BACKUPS_COMMAND_LINE); assertThat(createdCommand.get(), is(instanceOf(ListBackupsCommand.class))); } @Test public void listAllBackupCommandLineOptionsParsedCorrectly() throws URISyntaxException, InvalidKeyException, ParseException, IOException { Optional<BackupCommand> createdCommand = backupCLIParser.parse(LIST_ALL_BACKUPS_COMMAND_LINE); assertThat(createdCommand.get(), is(instanceOf(ListBackupsCommand.class))); } @Test public void restoreBackupCommandLineOptionsParsedCorrectly() throws URISyntaxException, InvalidKeyException, ParseException, IOException { Optional<BackupCommand> createdCommand = backupCLIParser.parse(RESTORE_COMMAND_LINE); assertThat(createdCommand.get(), is(instanceOf(RestoreFromBackupCommand.class))); } }
dropped the idea to deep test command creation in the parser test. this will be covered by the individual command tests and the integration test
azure-table-backup/src/test/java/com/yammer/collections/guava/azure/backup/tool/BackupCLIParserTest.java
dropped the idea to deep test command creation in the parser test. this will be covered by the individual command tests and the integration test
<ide><path>zure-table-backup/src/test/java/com/yammer/collections/guava/azure/backup/tool/BackupCLIParserTest.java <ide> import static org.hamcrest.CoreMatchers.is; <ide> import static org.junit.Assert.assertThat; <ide> <del>// TODO improve this tests resolution, to verify correct parsing <ide> @RunWith(MockitoJUnitRunner.class) <ide> public class BackupCLIParserTest { <ide> private static final String CONFIG_FILE_PATH = BackupToolIntegrationTest.class.getResource("testBackupAccountConfiguration.yml").getPath();
Java
agpl-3.0
a591851beed21ff1a71d686b583228ca6204f1be
0
BloatIt/bloatit,BloatIt/bloatit,BloatIt/bloatit,BloatIt/bloatit,BloatIt/bloatit
/* * Copyright (C) 2010 BloatIt. This file is part of BloatIt. BloatIt is free * software: you can redistribute it and/or modify it under the terms of the GNU * Affero General Public License as published by the Free Software Foundation, * either version 3 of the License, or (at your option) any later version. * BloatIt is distributed in the hope that it will be useful, but WITHOUT ANY * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR * A PARTICULAR PURPOSE. See the GNU Affero General Public License for more * details. You should have received a copy of the GNU Affero General Public * License along with BloatIt. If not, see <http://www.gnu.org/licenses/>. */ package com.bloatit.web.linkable.invoice; import java.util.List; import com.bloatit.framework.exceptions.highlevel.BadProgrammerException; import com.bloatit.framework.utils.PageIterable; import com.bloatit.framework.webprocessor.annotations.NonOptional; import com.bloatit.framework.webprocessor.annotations.Optional; import com.bloatit.framework.webprocessor.annotations.ParamContainer; import com.bloatit.framework.webprocessor.annotations.RequestParam; import com.bloatit.framework.webprocessor.annotations.tr; import com.bloatit.framework.webprocessor.annotations.RequestParam.Role; import com.bloatit.framework.webprocessor.context.Context; import com.bloatit.framework.webprocessor.context.Session; import com.bloatit.framework.webprocessor.url.Url; import com.bloatit.model.ContributionInvoice; import com.bloatit.model.Member; import com.bloatit.model.MilestoneContributionAmount; import com.bloatit.model.right.UnauthorizedOperationException; import com.bloatit.web.linkable.master.LoggedElveosAction; import com.bloatit.web.url.ContributionInvoicingInformationsActionUrl; import com.bloatit.web.url.ContributionInvoicingInformationsPageUrl; /** * Class that will create a new offer based on data received from a form. 
*/ @ParamContainer("action/invoicing/contribution_invoicing_informations") public final class ContributionInvoicingInformationsAction extends LoggedElveosAction { @RequestParam(message = @tr("The process is closed, expired, missing or invalid.")) @NonOptional(@tr("The process is closed, expired, missing or invalid.")) private final ContributionInvoicingProcess process; @RequestParam(name = "applyVAT", role = Role.POST) @Optional private final List<String> applyVAT; @RequestParam(name = "generate", role = Role.POST) @Optional private final String generate; @RequestParam(name = "preview", role = Role.POST) @Optional private final String preview; @SuppressWarnings("unused") private final ContributionInvoicingInformationsActionUrl url; public ContributionInvoicingInformationsAction(final ContributionInvoicingInformationsActionUrl url) { super(url); this.url = url; this.process = url.getProcess(); this.applyVAT = url.getApplyVAT(); this.preview = url.getPreview(); this.generate = url.getGenerate(); } @Override public Url doProcessRestricted(final Member me) { if(preview != null) { // Return to previous page with the right values ContributionInvoicingInformationsPageUrl contributionInvoicingInformationsPageUrl = new ContributionInvoicingInformationsPageUrl(process); contributionInvoicingInformationsPageUrl.setApplyVAT(applyVAT); return contributionInvoicingInformationsPageUrl; } // Generate the invoices final PageIterable<MilestoneContributionAmount> contributionAmounts = process.getMilestone().getContributionAmounts(); for (final MilestoneContributionAmount contributionAmount : contributionAmounts) { try { new ContributionInvoice(process.getActor(), contributionAmount.getContribution().getAuthor(), "Contribution", "Contribution", contributionAmount.getAmount(), contributionAmount.getMilestone(), contributionAmount.getContribution(), applyVAT.contains(contributionAmount.getId().toString())); } catch (final UnauthorizedOperationException e) { throw new BadProgrammerException("Fail create a ContributionInvoice", e); } } Context.getSession().notifyGood(Context.trn("{0} invoice succefully generated. You can download it in the invoicing tab", "{0} invoices succefully generated. You can download them in the invoicing tab", contributionAmounts.size(), contributionAmounts.size())); return process.close(); } @Override protected Url checkRightsAndEverything(final Member me) { return NO_ERROR; } @Override protected Url doProcessErrors() { return session.pickPreferredPage(); } @Override protected String getRefusalReason() { return Context.tr("You must be logged to generate invoices."); } @Override protected void transmitParameters() { } }
main/src/main/java/com/bloatit/web/linkable/invoice/ContributionInvoicingInformationsAction.java
/* * Copyright (C) 2010 BloatIt. This file is part of BloatIt. BloatIt is free * software: you can redistribute it and/or modify it under the terms of the GNU * Affero General Public License as published by the Free Software Foundation, * either version 3 of the License, or (at your option) any later version. * BloatIt is distributed in the hope that it will be useful, but WITHOUT ANY * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR * A PARTICULAR PURPOSE. See the GNU Affero General Public License for more * details. You should have received a copy of the GNU Affero General Public * License along with BloatIt. If not, see <http://www.gnu.org/licenses/>. */ package com.bloatit.web.linkable.invoice; import java.util.List; import com.bloatit.framework.exceptions.highlevel.BadProgrammerException; import com.bloatit.framework.utils.PageIterable; import com.bloatit.framework.webprocessor.annotations.NonOptional; import com.bloatit.framework.webprocessor.annotations.Optional; import com.bloatit.framework.webprocessor.annotations.ParamContainer; import com.bloatit.framework.webprocessor.annotations.RequestParam; import com.bloatit.framework.webprocessor.annotations.tr; import com.bloatit.framework.webprocessor.annotations.RequestParam.Role; import com.bloatit.framework.webprocessor.context.Context; import com.bloatit.framework.webprocessor.context.Session; import com.bloatit.framework.webprocessor.url.Url; import com.bloatit.model.ContributionInvoice; import com.bloatit.model.Member; import com.bloatit.model.MilestoneContributionAmount; import com.bloatit.model.right.UnauthorizedOperationException; import com.bloatit.web.linkable.master.LoggedElveosAction; import com.bloatit.web.url.ContributionInvoicingInformationsActionUrl; import com.bloatit.web.url.ContributionInvoicingInformationsPageUrl; /** * Class that will create a new offer based on data received from a form. 
*/ @ParamContainer("action/invoicing/contribution_invoicing_informations") public final class ContributionInvoicingInformationsAction extends LoggedElveosAction { @RequestParam(message = @tr("The process is closed, expired, missing or invalid.")) @NonOptional(@tr("The process is closed, expired, missing or invalid.")) private final ContributionInvoicingProcess process; @RequestParam(name = "applyVAT", role = Role.POST) @Optional private final List<String> applyVAT; @RequestParam(name = "generate", role = Role.POST) @Optional private final String generate; @RequestParam(name = "preview", role = Role.POST) @Optional private final String preview; @SuppressWarnings("unused") private final ContributionInvoicingInformationsActionUrl url; public ContributionInvoicingInformationsAction(final ContributionInvoicingInformationsActionUrl url) { super(url); this.url = url; this.process = url.getProcess(); this.applyVAT = url.getApplyVAT(); this.preview = url.getPreview(); this.generate = url.getGenerate(); } @Override public Url doProcessRestricted(final Member me) { if(preview != null) { // Return to previous page with the right values ContributionInvoicingInformationsPageUrl contributionInvoicingInformationsPageUrl = new ContributionInvoicingInformationsPageUrl(process); contributionInvoicingInformationsPageUrl.setApplyVAT(applyVAT); return contributionInvoicingInformationsPageUrl; } // Generate the invoices final PageIterable<MilestoneContributionAmount> contributionAmounts = process.getMilestone().getContributionAmounts(); for (final MilestoneContributionAmount contributionAmount : contributionAmounts) { try { new ContributionInvoice(process.getActor(), contributionAmount.getContribution().getAuthor(), "Contribution", "Contribution", contributionAmount.getAmount(), contributionAmount.getMilestone(), contributionAmount.getContribution(), applyVAT.contains(contributionAmount.getId().toString())); } catch (final UnauthorizedOperationException e) { throw new BadProgrammerException("Fail create a ContributionInvoice", e); } } Context.getSession().notifyGood(Context.trn("{0} invoice succefully generated.", "{0} invoices succefully generated.", contributionAmounts.size(), contributionAmounts.size())); return process.close(); } @Override protected Url checkRightsAndEverything(final Member me) { return NO_ERROR; } @Override protected Url doProcessErrors() { return session.pickPreferredPage(); } @Override protected String getRefusalReason() { return Context.tr("You must be logged to generate invoices."); } @Override protected void transmitParameters() { } }
improve notification for invoice generation
main/src/main/java/com/bloatit/web/linkable/invoice/ContributionInvoicingInformationsAction.java
improve notification for invoice generation
<ide><path>ain/src/main/java/com/bloatit/web/linkable/invoice/ContributionInvoicingInformationsAction.java <ide> <ide> } <ide> <del> Context.getSession().notifyGood(Context.trn("{0} invoice succefully generated.", "{0} invoices succefully generated.", contributionAmounts.size(), contributionAmounts.size())); <add> Context.getSession().notifyGood(Context.trn("{0} invoice succefully generated. You can download it in the invoicing tab", "{0} invoices succefully generated. You can download them in the invoicing tab", contributionAmounts.size(), contributionAmounts.size())); <ide> <ide> return process.close(); <ide> }
JavaScript
agpl-3.0
230f5c8a6908db20f9d739703022940b73a7441a
0
telefonicaid/notification_server,frsela/notification_server,frsela/notification_server,telefonicaid/notification_server,telefonicaid/notification_server,frsela/notification_server,frsela/notification_server,telefonicaid/notification_server,telefonicaid/notification_server,frsela/notification_server
/* jshint node:true */ /** * PUSH Notification server * (c) Telefonica Digital, 2012 - All rights reserved * License: GNU Affero V3 (see LICENSE file) * Fernando Rodríguez Sela <[email protected]> * Guillermo Lopez Leal <[email protected]> */ 'use strict'; var amqp = require('amqp'), Log = require('./Logger.js'), queuesConf = require('../config.js').queue, events = require('events'), util = require('util'); // Constants var QUEUE_DISCONNECTED = 0; var QUEUE_CREATED = 1; var QUEUE_ERROR = 2; var QUEUE_CONNECTED = 3; var ALL_QUEUES_CLOSED_GRACE_PERIOD = 10000; function MsgBroker() { events.EventEmitter.call(this); this.queues = []; this.conns = []; this.exchangeNames = {}; this.controlledClose = false; } util.inherits(MsgBroker, events.EventEmitter); MsgBroker.prototype.start = function() { Log.info('msgBroker::queue.init --> Connecting to the queue servers'); //Create connection to the broker if (!Array.isArray(queuesConf)) { queuesConf = [queuesConf]; } for (var i = queuesConf.length - 1; i >= 0; i--) { this.createConnection(queuesConf[i]); } }; MsgBroker.prototype.stop = function() { this.controlledClose = true; this.queues.forEach(function(element) { if (element.queue) { element.end(); } }); Log.info('msgbroker::close --> Closing connection to msgBroker'); }; MsgBroker.prototype.subscribe = function(queueName, args, broker, callback) { if (this.controlledClose) { return; } if (broker && !Array.isArray(broker)) { broker = [broker]; } else { broker = this.queues; } broker = broker.filter(function(conn) { return conn.state === QUEUE_CONNECTED; }); broker.forEach(function(br) { var exchange = br.exchange(queueName + '-fanout', { type: 'fanout' }); var q = br.queue(queueName, args, function() { Log.info('msgbroker::subscribe --> Subscribed to queue: ' + queueName); q.bind(exchange, '*'); q.subscribe(callback); }); }); }; /** * Insert a new message into the queue */ MsgBroker.prototype.push = function(queueName, obj) { Log.debug('msgbroker::push --> Sending to the queue ' + queueName + ' the package:', obj); var i = this.queues.length; var exchangeName = queueName + '-fanout'; /** * We need to create an exchange for each of the queues where we need * to push. 
Remember that a SINGLE exchange is not possible since we can push * to two different queues (UDP and WS1, for example) */ while (!this.exchangeNames[exchangeName] && i > 0) { if (this.queues[i - 1]) { this.exchangeNames[exchangeName] = this.queues[i - 1].exchange(queueName + '-fanout', { type: 'fanout' }); } i--; } this.exchangeNames[exchangeName].publish(queueName, obj, { contentType: 'application/json', deliveryMode: 1 }); }; MsgBroker.prototype.createConnection = function(queuesConf) { var conn = new amqp.createConnection({ port: queuesConf.port, host: queuesConf.host, Login: queuesConf.Login, password: queuesConf.password, heartbeat: queuesConf.heartbeat }, { reconnect: true, reconnectBackoffStrategy: 'exponential' }); conn.state = QUEUE_CREATED; conn.id = Math.random(); this.conns.push(conn); this.allClosedGracePeriod = null; var self = this; // Events for this queue conn.on('ready', (function() { if (self.allClosedGracePeriod) { Log.info('msgbroker::queue::queue.ready --> Some queues recovered !'); clearTimeout(self.allClosedGracePeriod); self.allClosedGracePeriod = null; } conn.state = QUEUE_CONNECTED; Log.info('msgbroker::queue.ready --> Connected to one Message Broker, id=' + conn.id); self.queues.push(conn); self.emit('ready', conn); })); conn.on('close', (function() { if (conn.reconnecting) { return; } Log.info('msgbroker::queue.close --> Close on one Message Broker, id=' + conn.id); if (conn.state === QUEUE_CONNECTED) { conn.state = QUEUE_DISCONNECTED; } var index = self.queues.indexOf(conn); if (index >= 0) { self.queues.splice(index, 1); } var length = self.queues.length; var allDisconnected = self.conns.every(self.isDisconnected); var pending = self.conns.some(self.pending); if (length === 0 && allDisconnected && !pending) { if (!self.controlledClose) { Log.error('msgbroker::queue::queue.close --> All queues closed !'); self.allClosedGracePeriod = setTimeout(function() { Log.error('msgbroker::queue::queue.close --> Sending closing signal'); self.emit('closed'); self.stop(); }, ALL_QUEUES_CLOSED_GRACE_PERIOD); } } self.emit('queuedisconnected'); })); conn.on('error', (function(error) { if (conn.reconnecting) { return; } Log.error(Log.messages.ERROR_MBCONNECTIONERROR, { 'error': error, 'id': conn.id }); conn.state = QUEUE_ERROR; self.emit('queuedisconnected', error); self.exchange = undefined; })); conn.on('heartbeat', (function() { Log.debug('msgbroker::heartbeat'); })); }; MsgBroker.prototype.reconnectQueues = function() { Log.debug('Reconnecting to the Messages Queues'); for (var i = 0; i < this.conns.length; i++) { this.conns[i].reconnecting = true; Log.debug('Reconnecting connection ' + this.conns[i].id); this.conns[i].disconnect(); setTimeout(function() { Log.debug('Reconnecting ' + this.id + ' finished'); this.reconnecting = false; }.bind(this.conns[i]), 60000); }; } MsgBroker.prototype.isDisconnected = function(element) { return element.state !== QUEUE_CONNECTED; }; MsgBroker.prototype.pending = function(element) { return element.state === QUEUE_CREATED; }; var _msg = new MsgBroker(); function getMsgBroker() { return _msg; } module.exports = getMsgBroker();
src/common/MsgBroker.js
/* jshint node:true */ /** * PUSH Notification server * (c) Telefonica Digital, 2012 - All rights reserved * License: GNU Affero V3 (see LICENSE file) * Fernando Rodríguez Sela <[email protected]> * Guillermo Lopez Leal <[email protected]> */ 'use strict'; var amqp = require('amqp'), Log = require('./Logger.js'), queuesConf = require('../config.js').queue, events = require('events'), util = require('util'); // Constants var QUEUE_DISCONNECTED = 0; var QUEUE_CREATED = 1; var QUEUE_ERROR = 2; var QUEUE_CONNECTED = 3; var ALL_QUEUES_CLOSED_GRACE_PERIOD = 10000; function MsgBroker() { events.EventEmitter.call(this); this.queues = []; this.conns = []; this.exchangeNames = {}; this.controlledClose = false; } util.inherits(MsgBroker, events.EventEmitter); MsgBroker.prototype.start = function() { Log.info('msgBroker::queue.init --> Connecting to the queue servers'); //Create connection to the broker if (!Array.isArray(queuesConf)) { queuesConf = [queuesConf]; } for (var i = queuesConf.length - 1; i >= 0; i--) { this.createConnection(queuesConf[i]); } }; MsgBroker.prototype.stop = function() { this.controlledClose = true; this.queues.forEach(function(element) { if (element.queue) { element.end(); } }); Log.info('msgbroker::close --> Closing connection to msgBroker'); }; MsgBroker.prototype.subscribe = function(queueName, args, broker, callback) { if (this.controlledClose) { return; } if (broker && !Array.isArray(broker)) { broker = [broker]; } else { broker = this.queues; } broker = broker.filter(function(conn) { return conn.state === QUEUE_CONNECTED; }); broker.forEach(function(br) { if (br.reconnecting) { Log.debug('Avoiding create new subscriptions'); return; } var exchange = br.exchange(queueName + '-fanout', { type: 'fanout' }); var q = br.queue(queueName, args, function() { Log.info('msgbroker::subscribe --> Subscribed to queue: ' + queueName); q.bind(exchange, '*'); q.subscribe(callback); }); }); }; /** * Insert a new message into the queue */ MsgBroker.prototype.push = function(queueName, obj) { Log.debug('msgbroker::push --> Sending to the queue ' + queueName + ' the package:', obj); var i = this.queues.length; var exchangeName = queueName + '-fanout'; /** * We need to create an exchange for each of the queues where we need * to push. 
Remember that a SINGLE exchange is not possible since we can push * to two different queues (UDP and WS1, for example) */ while (!this.exchangeNames[exchangeName] && i > 0) { if (this.queues[i - 1]) { this.exchangeNames[exchangeName] = this.queues[i - 1].exchange(queueName + '-fanout', { type: 'fanout' }); } i--; } this.exchangeNames[exchangeName].publish(queueName, obj, { contentType: 'application/json', deliveryMode: 1 }); }; MsgBroker.prototype.createConnection = function(queuesConf) { var conn = new amqp.createConnection({ port: queuesConf.port, host: queuesConf.host, Login: queuesConf.Login, password: queuesConf.password, heartbeat: queuesConf.heartbeat }, { reconnect: true, reconnectBackoffStrategy: 'exponential' }); conn.state = QUEUE_CREATED; conn.id = Math.random(); this.conns.push(conn); this.allClosedGracePeriod = null; var self = this; // Events for this queue conn.on('ready', (function() { if (self.allClosedGracePeriod) { Log.info('msgbroker::queue::queue.ready --> Some queues recovered !'); clearTimeout(self.allClosedGracePeriod); self.allClosedGracePeriod = null; } conn.state = QUEUE_CONNECTED; Log.info('msgbroker::queue.ready --> Connected to one Message Broker, id=' + conn.id); self.queues.push(conn); self.emit('ready', conn); })); conn.on('close', (function() { if (conn.reconnecting) { return; } Log.info('msgbroker::queue.close --> Close on one Message Broker, id=' + conn.id); if (conn.state === QUEUE_CONNECTED) { conn.state = QUEUE_DISCONNECTED; } var index = self.queues.indexOf(conn); if (index >= 0) { self.queues.splice(index, 1); } var length = self.queues.length; var allDisconnected = self.conns.every(self.isDisconnected); var pending = self.conns.some(self.pending); if (length === 0 && allDisconnected && !pending) { if (!self.controlledClose) { Log.error('msgbroker::queue::queue.close --> All queues closed !'); self.allClosedGracePeriod = setTimeout(function() { Log.error('msgbroker::queue::queue.close --> Sending closing signal'); self.emit('closed'); self.stop(); }, ALL_QUEUES_CLOSED_GRACE_PERIOD); } } self.emit('queuedisconnected'); })); conn.on('error', (function(error) { if (conn.reconnecting) { return; } Log.error(Log.messages.ERROR_MBCONNECTIONERROR, { 'error': error, 'id': conn.id }); conn.state = QUEUE_ERROR; self.emit('queuedisconnected', error); self.exchange = undefined; })); conn.on('heartbeat', (function() { Log.debug('msgbroker::heartbeat'); })); }; MsgBroker.prototype.reconnectQueues = function() { Log.debug('Reconnecting to the Messages Queues'); for (var i = 0; i < this.conns.length; i++) { this.conns[i].reconnecting = true; Log.debug('Reconnecting connection ' + this.conns[i].id); this.conns[i].disconnect(); setTimeout(function() { Log.debug('Reconnecting ' + this.id + ' finished'); this.reconnecting = false; }.bind(this.conns[i]), 60000); }; } MsgBroker.prototype.isDisconnected = function(element) { return element.state !== QUEUE_CONNECTED; }; MsgBroker.prototype.pending = function(element) { return element.state === QUEUE_CREATED; }; var _msg = new MsgBroker(); function getMsgBroker() { return _msg; } module.exports = getMsgBroker();
Resubscribe after MQ fail
src/common/MsgBroker.js
Resubscribe after MQ fail
<ide><path>rc/common/MsgBroker.js <ide> }); <ide> <ide> broker.forEach(function(br) { <del> if (br.reconnecting) { <del> Log.debug('Avoiding create new subscriptions'); <del> return; <del> } <ide> var exchange = br.exchange(queueName + '-fanout', { <ide> type: 'fanout' <ide> });
Java
mpl-2.0
24dda6428c0ba0c490b4ec28e0efa557c2aadaae
0
msteinhoff/hello-world
c6f2218f-cb8e-11e5-9e8f-00264a111016
src/main/java/HelloWorld.java
c6d4ec70-cb8e-11e5-8159-00264a111016
I'm done
src/main/java/HelloWorld.java
I'm done
<ide><path>rc/main/java/HelloWorld.java <del>c6d4ec70-cb8e-11e5-8159-00264a111016 <add>c6f2218f-cb8e-11e5-9e8f-00264a111016
Java
apache-2.0
865a41026cdf6de25272bf1c66f3222d51170d9a
0
neo4j/neo4j-java-driver,neo4j/neo4j-java-driver,neo4j/neo4j-java-driver
/** * Copyright (c) 2002-2016 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.neo4j.driver.internal; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import org.neo4j.driver.internal.net.BoltServerAddress; import org.neo4j.driver.internal.security.SecurityPlan; import org.neo4j.driver.internal.spi.Connection; import org.neo4j.driver.internal.spi.ConnectionPool; import org.neo4j.driver.internal.util.Clock; import org.neo4j.driver.internal.util.ConcurrentRoundRobinSet; import org.neo4j.driver.internal.util.Consumer; import org.neo4j.driver.v1.AccessMode; import org.neo4j.driver.v1.Logging; import org.neo4j.driver.v1.Record; import org.neo4j.driver.v1.Session; import org.neo4j.driver.v1.StatementResult; import org.neo4j.driver.v1.Value; import org.neo4j.driver.v1.exceptions.ClientException; import org.neo4j.driver.v1.exceptions.ConnectionFailureException; import org.neo4j.driver.v1.exceptions.ServiceUnavailableException; import org.neo4j.driver.v1.util.Function; import static java.lang.String.format; public class RoutingDriver extends BaseDriver { private static final String GET_SERVERS = "dbms.cluster.routing.getServers"; private static final long MAX_TTL = Long.MAX_VALUE / 1000L; private final static Comparator<BoltServerAddress> COMPARATOR = new Comparator<BoltServerAddress>() { @Override public int compare( BoltServerAddress o1, BoltServerAddress o2 ) { int compare = o1.host().compareTo( o2.host() ); if ( compare == 0 ) { compare = Integer.compare( o1.port(), o2.port() ); } return compare; } }; private static final int MIN_ROUTERS = 1; private final ConnectionPool connections; private final Function<Connection,Session> sessionProvider; private final Clock clock; private final ConcurrentRoundRobinSet<BoltServerAddress> routingServers = new ConcurrentRoundRobinSet<>( COMPARATOR ); private final ConcurrentRoundRobinSet<BoltServerAddress> readServers = new ConcurrentRoundRobinSet<>( COMPARATOR ); private final ConcurrentRoundRobinSet<BoltServerAddress> writeServers = new ConcurrentRoundRobinSet<>( COMPARATOR ); private final AtomicLong expires = new AtomicLong( 0L ); public RoutingDriver( BoltServerAddress seedAddress, ConnectionPool connections, SecurityPlan securityPlan, Function<Connection,Session> sessionProvider, Clock clock, Logging logging ) { super( securityPlan, logging ); routingServers.add( seedAddress ); this.connections = connections; this.sessionProvider = sessionProvider; this.clock = clock; checkServers(); } private void checkServers() { synchronized ( routingServers ) { if ( expires.get() < clock.millis() || routingServers.size() <= MIN_ROUTERS || readServers.isEmpty() || writeServers.isEmpty() ) { getServers(); } } } private Set<BoltServerAddress> forgetAllServers() { final Set<BoltServerAddress> seen = new HashSet<>(); seen.addAll( 
routingServers ); seen.addAll( readServers ); seen.addAll( writeServers ); readServers.clear(); writeServers.clear(); return seen; } private long calculateNewExpiry( Record record ) { long ttl = record.get( "ttl" ).asLong(); long nextExpiry = clock.millis() + 1000L * ttl; if ( ttl < 0 || ttl >= MAX_TTL || nextExpiry < 0 ) { return Long.MAX_VALUE; } else { return nextExpiry; } } //must be called from a synchronized block private void getServers() { BoltServerAddress address = null; try { boolean success = false; final Set<BoltServerAddress> newRouters = new HashSet<>( ); final Set<BoltServerAddress> seen = forgetAllServers(); while ( !routingServers.isEmpty() && !success ) { address = routingServers.hop(); success = call( address, GET_SERVERS, new Consumer<Record>() { @Override public void accept( Record record ) { expires.set( calculateNewExpiry( record ) ); List<ServerInfo> servers = servers( record ); for ( ServerInfo server : servers ) { seen.removeAll( server.addresses() ); switch ( server.role() ) { case "READ": readServers.addAll( server.addresses() ); break; case "WRITE": writeServers.addAll( server.addresses() ); break; case "ROUTE": newRouters.addAll( server.addresses() ); break; } } } } ); //We got trough but server gave us an empty list of routers if (success && newRouters.isEmpty()) { success = false; } else if (success) { routingServers.clear(); routingServers.addAll( newRouters ); } } if ( !success ) { throw new ServiceUnavailableException( "Run out of servers" ); } //the server no longer think we should care about these for ( BoltServerAddress remove : seen ) { connections.purge( remove ); } } catch ( Exception ex ) { //discovery failed, not much to do, stick with what we've got //this may happen because server is running in standalone mode this.close(); throw new ServiceUnavailableException( String.format( "Server %s couldn't perform discovery", address == null ? 
"`UNKNOWN`" : address.toString() ), ex ); } } private static class ServerInfo { private final List<BoltServerAddress> addresses; private final String role; public ServerInfo( List<BoltServerAddress> addresses, String role ) { this.addresses = addresses; this.role = role; } public String role() { return role; } List<BoltServerAddress> addresses() { return addresses; } } private List<ServerInfo> servers( Record record ) { return record.get( "servers" ).asList( new Function<Value,ServerInfo>() { @Override public ServerInfo apply( Value value ) { return new ServerInfo( value.get( "addresses" ).asList( new Function<Value,BoltServerAddress>() { @Override public BoltServerAddress apply( Value value ) { return new BoltServerAddress( value.asString() ); } } ), value.get( "role" ).asString() ); } } ); } //must be called from a synchronized method private boolean call( BoltServerAddress address, String procedureName, Consumer<Record> recorder ) { Connection acquire; Session session = null; try { acquire = connections.acquire( address ); session = sessionProvider.apply( acquire ); StatementResult records = session.run( format( "CALL %s", procedureName ) ); //got a result but was empty if (!records.hasNext()) { forget( address ); return false; } //consume the results while ( records.hasNext() ) { recorder.accept( records.next() ); } } catch ( Throwable e ) { log.error( e.getMessage(), e ); forget( address ); return false; } finally { if ( session != null ) { session.close(); } } return true; } private synchronized void forget( BoltServerAddress address ) { connections.purge( address ); if ( routingServers.remove( address ) ) { log.debug( "Removing %s from routers", address.toString() ); } if (readServers.remove( address ) ) { log.debug( "Removing %s from readers", address.toString() ); } if (writeServers.remove( address )) { log.debug( "Removing %s from writers", address.toString() ); } } @Override public Session session() { return session( AccessMode.WRITE ); } @Override public Session session( final AccessMode mode ) { return new RoutingNetworkSession( mode, acquireConnection( mode ), new RoutingErrorHandler() { @Override public void onConnectionFailure( BoltServerAddress address ) { forget( address ); } @Override public void onWriteFailure( BoltServerAddress address ) { writeServers.remove( address ); } }); } private Connection acquireConnection( AccessMode role ) { ConcurrentRoundRobinSet<BoltServerAddress> servers; switch ( role ) { case READ: servers = readServers; break; case WRITE: servers = writeServers; break; default: throw new ClientException( role + " is not supported for creating new sessions" ); } //Potentially rediscover servers if we are not happy with our current knowledge checkServers(); int numberOfServers = servers.size(); for ( int i = 0; i < numberOfServers; i++ ) { BoltServerAddress address = servers.hop(); try { return connections.acquire( address ); } catch ( ConnectionFailureException e ) { forget( address ); } } throw new ConnectionFailureException( "Failed to connect to any servers" ); } @Override public void close() { try { connections.close(); } catch ( Exception ex ) { log.error( format( "~~ [ERROR] %s", ex.getMessage() ), ex ); } } //For testing Set<BoltServerAddress> routingServers() { return Collections.unmodifiableSet( routingServers ); } //For testing Set<BoltServerAddress> readServers() { return Collections.unmodifiableSet( readServers ); } //For testing Set<BoltServerAddress> writeServers() { return Collections.unmodifiableSet( writeServers ); } //For testing 
ConnectionPool connectionPool() { return connections; } }
driver/src/main/java/org/neo4j/driver/internal/RoutingDriver.java
/** * Copyright (c) 2002-2016 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.neo4j.driver.internal; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import org.neo4j.driver.internal.net.BoltServerAddress; import org.neo4j.driver.internal.security.SecurityPlan; import org.neo4j.driver.internal.spi.Connection; import org.neo4j.driver.internal.spi.ConnectionPool; import org.neo4j.driver.internal.util.Clock; import org.neo4j.driver.internal.util.ConcurrentRoundRobinSet; import org.neo4j.driver.internal.util.Consumer; import org.neo4j.driver.v1.AccessMode; import org.neo4j.driver.v1.Logging; import org.neo4j.driver.v1.Record; import org.neo4j.driver.v1.Session; import org.neo4j.driver.v1.StatementResult; import org.neo4j.driver.v1.Value; import org.neo4j.driver.v1.exceptions.ClientException; import org.neo4j.driver.v1.exceptions.ConnectionFailureException; import org.neo4j.driver.v1.exceptions.ServiceUnavailableException; import org.neo4j.driver.v1.util.Function; import static java.lang.String.format; public class RoutingDriver extends BaseDriver { private static final String GET_SERVERS = "dbms.cluster.routing.getServers"; private static final long MAX_TTL = Long.MAX_VALUE / 1000L; private final static Comparator<BoltServerAddress> COMPARATOR = new Comparator<BoltServerAddress>() { @Override public int compare( BoltServerAddress o1, BoltServerAddress o2 ) { int compare = o1.host().compareTo( o2.host() ); if ( compare == 0 ) { compare = Integer.compare( o1.port(), o2.port() ); } return compare; } }; private static final int MIN_SERVERS = 1; private static final int CONNECTION_RETRIES = 3; private final ConnectionPool connections; private final Function<Connection,Session> sessionProvider; private final Clock clock; private final ConcurrentRoundRobinSet<BoltServerAddress> routingServers = new ConcurrentRoundRobinSet<>( COMPARATOR ); private final ConcurrentRoundRobinSet<BoltServerAddress> readServers = new ConcurrentRoundRobinSet<>( COMPARATOR ); private final ConcurrentRoundRobinSet<BoltServerAddress> writeServers = new ConcurrentRoundRobinSet<>( COMPARATOR ); private final AtomicLong expires = new AtomicLong( 0L ); public RoutingDriver( BoltServerAddress seedAddress, ConnectionPool connections, SecurityPlan securityPlan, Function<Connection,Session> sessionProvider, Clock clock, Logging logging ) { super( securityPlan, logging ); routingServers.add( seedAddress ); this.connections = connections; this.sessionProvider = sessionProvider; this.clock = clock; checkServers(); } private void checkServers() { synchronized ( routingServers ) { if ( expires.get() < clock.millis() || routingServers.size() < MIN_SERVERS || readServers.isEmpty() || writeServers.isEmpty() ) { getServers(); } } } private Set<BoltServerAddress> forgetAllServers() { final 
Set<BoltServerAddress> seen = new HashSet<>(); seen.addAll( routingServers ); seen.addAll( readServers ); seen.addAll( writeServers ); readServers.clear(); writeServers.clear(); return seen; } private long calculateNewExpiry( Record record ) { long ttl = record.get( "ttl" ).asLong(); long nextExpiry = clock.millis() + 1000L * ttl; if ( ttl < 0 || ttl >= MAX_TTL || nextExpiry < 0 ) { return Long.MAX_VALUE; } else { return nextExpiry; } } //must be called from a synchronized block private void getServers() { BoltServerAddress address = null; try { boolean success = false; final Set<BoltServerAddress> newRouters = new HashSet<>( ); final Set<BoltServerAddress> seen = forgetAllServers(); while ( !routingServers.isEmpty() && !success ) { address = routingServers.hop(); success = call( address, GET_SERVERS, new Consumer<Record>() { @Override public void accept( Record record ) { expires.set( calculateNewExpiry( record ) ); List<ServerInfo> servers = servers( record ); for ( ServerInfo server : servers ) { seen.removeAll( server.addresses() ); switch ( server.role() ) { case "READ": readServers.addAll( server.addresses() ); break; case "WRITE": writeServers.addAll( server.addresses() ); break; case "ROUTE": newRouters.addAll( server.addresses() ); break; } } } } ); //We got trough but server gave us an empty list of routers if (success && newRouters.isEmpty()) { success = false; } else if (success) { routingServers.clear(); routingServers.addAll( newRouters ); } } if ( !success ) { throw new ServiceUnavailableException( "Run out of servers" ); } //the server no longer think we should care about these for ( BoltServerAddress remove : seen ) { connections.purge( remove ); } } catch ( Exception ex ) { //discovery failed, not much to do, stick with what we've got //this may happen because server is running in standalone mode this.close(); throw new ServiceUnavailableException( String.format( "Server %s couldn't perform discovery", address == null ? 
"`UNKNOWN`" : address.toString() ), ex ); } } private static class ServerInfo { private final List<BoltServerAddress> addresses; private final String role; public ServerInfo( List<BoltServerAddress> addresses, String role ) { this.addresses = addresses; this.role = role; } public String role() { return role; } List<BoltServerAddress> addresses() { return addresses; } } private List<ServerInfo> servers( Record record ) { return record.get( "servers" ).asList( new Function<Value,ServerInfo>() { @Override public ServerInfo apply( Value value ) { return new ServerInfo( value.get( "addresses" ).asList( new Function<Value,BoltServerAddress>() { @Override public BoltServerAddress apply( Value value ) { return new BoltServerAddress( value.asString() ); } } ), value.get( "role" ).asString() ); } } ); } //must be called from a synchronized method private boolean call( BoltServerAddress address, String procedureName, Consumer<Record> recorder ) { Connection acquire; Session session = null; try { acquire = connections.acquire( address ); session = sessionProvider.apply( acquire ); StatementResult records = session.run( format( "CALL %s", procedureName ) ); //got a result but was empty if (!records.hasNext()) { forget( address ); return false; } //consume the results while ( records.hasNext() ) { recorder.accept( records.next() ); } } catch ( Throwable e ) { log.error( e.getMessage(), e ); forget( address ); return false; } finally { if ( session != null ) { session.close(); } } return true; } private synchronized void forget( BoltServerAddress address ) { connections.purge( address ); routingServers.remove( address ); readServers.remove( address ); writeServers.remove( address ); } @Override public Session session() { return session( AccessMode.WRITE ); } @Override public Session session( final AccessMode mode ) { return new RoutingNetworkSession( mode, acquireConnection( mode ), new RoutingErrorHandler() { @Override public void onConnectionFailure( BoltServerAddress address ) { forget( address ); } @Override public void onWriteFailure( BoltServerAddress address ) { writeServers.remove( address ); } }); } private Connection acquireConnection( AccessMode role ) { ConcurrentRoundRobinSet<BoltServerAddress> servers; switch ( role ) { case READ: servers = readServers; break; case WRITE: servers = writeServers; break; default: throw new ClientException( role + " is not supported for creating new sessions" ); } //Potentially rediscover servers if we are not happy with our current knowledge checkServers(); int numberOfServers = servers.size(); for ( int i = 0; i < numberOfServers; i++ ) { BoltServerAddress address = servers.hop(); try { return connections.acquire( address ); } catch ( ConnectionFailureException e ) { forget( address ); } } throw new ConnectionFailureException( "Failed to connect to any servers" ); } @Override public void close() { try { connections.close(); } catch ( Exception ex ) { log.error( format( "~~ [ERROR] %s", ex.getMessage() ), ex ); } } //For testing Set<BoltServerAddress> routingServers() { return Collections.unmodifiableSet( routingServers ); } //For testing Set<BoltServerAddress> readServers() { return Collections.unmodifiableSet( readServers ); } //For testing Set<BoltServerAddress> writeServers() { return Collections.unmodifiableSet( writeServers ); } //For testing ConnectionPool connectionPool() { return connections; } }
Check when only one router left. Waiting until we completely run out of routing servers is too late; we must force a call to `getServers` when there is only one known router.
driver/src/main/java/org/neo4j/driver/internal/RoutingDriver.java
Check when only one router left
<ide><path>river/src/main/java/org/neo4j/driver/internal/RoutingDriver.java <ide> return compare; <ide> } <ide> }; <del> private static final int MIN_SERVERS = 1; <del> private static final int CONNECTION_RETRIES = 3; <add> private static final int MIN_ROUTERS = 1; <ide> private final ConnectionPool connections; <ide> private final Function<Connection,Session> sessionProvider; <ide> private final Clock clock; <ide> synchronized ( routingServers ) <ide> { <ide> if ( expires.get() < clock.millis() || <del> routingServers.size() < MIN_SERVERS || <add> routingServers.size() <= MIN_ROUTERS || <ide> readServers.isEmpty() || <ide> writeServers.isEmpty() ) <ide> { <ide> private synchronized void forget( BoltServerAddress address ) <ide> { <ide> connections.purge( address ); <del> routingServers.remove( address ); <del> readServers.remove( address ); <del> writeServers.remove( address ); <add> if ( routingServers.remove( address ) ) <add> { <add> log.debug( "Removing %s from routers", address.toString() ); <add> } <add> if (readServers.remove( address ) ) <add> { <add> log.debug( "Removing %s from readers", address.toString() ); <add> } <add> if (writeServers.remove( address )) <add> { <add> log.debug( "Removing %s from writers", address.toString() ); <add> } <ide> } <ide> <ide> @Override
JavaScript
bsd-3-clause
db1fc144e2a0210752326bff666d9a277b28518f
0
dustinmoorman/tau.pe,dustinmoorman/tau.pe
var db = require("mongojs").connect("localhost:27017/taupe", ["url"]); var slugExists = function(slug){ db.url.findOne({"slug": slug}, function(error, match){ return match != null; }); }; var generateSlug = function(){ var lib = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; var slug = ""; do { for( var i=0; i < Math.floor((Math.random() * 8)+1); i++ ) slug += lib.charAt(Math.floor(Math.random() * lib.length)); } while(slugExists(slug)); return slug; }; exports.getUrl = function(request, response){ db.url.findOne({"slug": request.params.id}, function(error, url){ if(error){ response.json(error); } else { response.json(url); } }); }; exports.addUrl = function(request, response){ var urlRegex = /(ftp|http|https):\/\/(\w+:{0,1}\w*@)?(\S+)(:[0-9]+)?(\/|\/([\w#!:.?+=&%@!\-\/]))?/; var json = {}; if(urlRegex.test(request.body.url)){ var slug = generateSlug(); db.url.insert({ "url": request.body.url, "slug": slug, "date": Date.now() }); json = {"slug": slug, "url": request.body.url}; } response.json(json); };
routes/api.js
var db = require("mongojs").connect("localhost:27017/taupe", ["url"]); var slugExists = function(slug){ db.url.findOne({"slug": slug}, function(error, match){ return match != null; }); }; var generateSlug = function(){ var lib = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; var slug = ""; do { for( var i=0; i < Math.floor((Math.random() * 8)+1); i++ ) slug += lib.charAt(Math.floor(Math.random() * lib.length)); } while(slugExists(slug)); return slug; }; exports.getUrl = function(request, response){ db.url.findOne({"slug": request.params.id}, function(error, url){ if(error){ response.json(error); } else { response.json(url); } }); }; exports.addUrl = function(request, response){ var urlRegex = /(ftp|http|https):\/\/(\w+:{0,1}\w*@)?(\S+)(:[0-9]+)?(\/|\/([\w#!:.?+=&%@!\-\/]))?/; var json = {}; if(urlRegex.test(request.body.url)){ var slug = generateSlug(); db.url.insert({ "url": request.body.url, "slug": slug, "ip": request.connection.remoteAddress, "date": Date.now() }); json = {"slug": slug, "url": request.body.url}; } response.json(json); };
don't actually need this.
routes/api.js
don't actually need this.
<ide><path>outes/api.js <ide> db.url.insert({ <ide> "url": request.body.url, <ide> "slug": slug, <del> "ip": request.connection.remoteAddress, <ide> "date": Date.now() <ide> }); <ide>
Java
apache-2.0
error: pathspec 'WebCrawler/src/ch/ice/controller/ExcelWriter.java' did not match any file(s) known to git
f90c2ccc68622f09b8650279c714086e352d6169
1
LuzernWGProjects/Schurter
/** * */ package ch.ice.controller; import ch.ice.controller.interf.Writer; /** * @author Oliver * */ public class ExcelWriter implements Writer { HSSFWorkbook workbook = new HSSFWorkbook(); HSSFSheet sheet = workbook.createSheet("Sample sheet"); Map<String, Object[]> data = new HashMap<String, Object[]>(); data.put("1", new Object[] {"Emp No.", "Name", "Salary"}); data.put("2", new Object[] {1d, "John", 1500000d}); data.put("3", new Object[] {2d, "Sam", 800000d}); data.put("4", new Object[] {3d, "Dean", 700000d}); Set<String> keyset = data.keySet(); int rownum = 0; for (String key : keyset) { Row row = sheet.createRow(rownum++); Object [] objArr = data.get(key); int cellnum = 0; for (Object obj : objArr) { Cell cell = row.createCell(cellnum++); if(obj instanceof Date) cell.setCellValue((Date)obj); else if(obj instanceof Boolean) cell.setCellValue((Boolean)obj); else if(obj instanceof String) cell.setCellValue((String)obj); else if(obj instanceof Double) cell.setCellValue((Double)obj); } } try { FileOutputStream out = new FileOutputStream(new File("C:\\new.xls")); workbook.write(out); out.close(); System.out.println("Excel written successfully.."); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } }
WebCrawler/src/ch/ice/controller/ExcelWriter.java
added excelwriter code snippet
WebCrawler/src/ch/ice/controller/ExcelWriter.java
added excelwriter code snippet
<ide><path>ebCrawler/src/ch/ice/controller/ExcelWriter.java <add>/** <add> * <add> */ <add>package ch.ice.controller; <add> <add>import ch.ice.controller.interf.Writer; <add> <add>/** <add> * @author Oliver <add> * <add> */ <add>public class ExcelWriter implements Writer { <add> <add> HSSFWorkbook workbook = new HSSFWorkbook(); <add> HSSFSheet sheet = workbook.createSheet("Sample sheet"); <add> <add> Map<String, Object[]> data = new HashMap<String, Object[]>(); <add> data.put("1", new Object[] {"Emp No.", "Name", "Salary"}); <add> data.put("2", new Object[] {1d, "John", 1500000d}); <add> data.put("3", new Object[] {2d, "Sam", 800000d}); <add> data.put("4", new Object[] {3d, "Dean", 700000d}); <add> <add> Set<String> keyset = data.keySet(); <add> int rownum = 0; <add> for (String key : keyset) { <add> Row row = sheet.createRow(rownum++); <add> Object [] objArr = data.get(key); <add> int cellnum = 0; <add> for (Object obj : objArr) { <add> Cell cell = row.createCell(cellnum++); <add> if(obj instanceof Date) <add> cell.setCellValue((Date)obj); <add> else if(obj instanceof Boolean) <add> cell.setCellValue((Boolean)obj); <add> else if(obj instanceof String) <add> cell.setCellValue((String)obj); <add> else if(obj instanceof Double) <add> cell.setCellValue((Double)obj); <add> } <add> } <add> <add> try { <add> FileOutputStream out = <add> new FileOutputStream(new File("C:\\new.xls")); <add> workbook.write(out); <add> out.close(); <add> System.out.println("Excel written successfully.."); <add> <add> } catch (FileNotFoundException e) { <add> e.printStackTrace(); <add> } catch (IOException e) { <add> e.printStackTrace(); <add> } <add> <add>}
JavaScript
mit
86e356385608c331f73aa2b01b7334821c10d938
0
boffinHouse/rawblock,boffinHouse/rawblock
/**
 * original by
 * http://krasimirtsonev.com/blog/article/A-modern-JavaScript-router-in-100-lines-history-api-pushState-hash-url
 */
import rb from './global-rb';
import deserialize from './deserialize';
import getID from './get-id';
import addLog from './add-log';

const regPlus = /\+/g;
const regSlashBegin = /^\//;
const regSlashEnd = /\/$/;
const regFullHash = /#(.*)$/;
const regWildCard = /\*$/;
const regReloadStop = /reload|stop/;
const thenable = Promise.resolve();
const winHistory = window.history;

let historyKeyCounter = 0;

const returnTrue = () => true;

function decodeParam(param){
    return decodeURIComponent(param.replace(regPlus, ' '));
}

rb.Router = addLog({
    routes: {},
    mode: 'history',
    root: '/',
    current: '',
    regHash: /#!(.*)$/,
    regIndex: /\/index\.htm[l]*$/,
    //reload reloads the page on Router.navigate, replace uses replaceState on Router.navigate and recalls the handler and recall simply re-calls the router handler
    samePathStrategy: 'replace', //reload, replace, recall, stop
    noNavigate: false,
    history: null,
    activeHistoryIndex: -1,
    storageKey: 'rb_router',
    init({ options, listen } = {}) {
        this.config(options);
        this.initHistory();

        if(listen){
            this.listen();
        }
    },
    config: function (options) {
        options = options || {};
        this.mode = options.mode != 'hash' && ('pushState' in history) ? 'history' : 'hash';

        if (options.regHash) {
            this.regHash = options.regHash;
        }

        if (options.regIndex) {
            this.regIndex = options.regIndex;
        }

        this.root = options.root ? '/' + this.clearSlashes(options.root) + '/' : '/';
        return this;
    },
    getFragment: function () {
        let match;
        let fragment = '';

        if (this.mode != 'hash') {
            fragment = decodeURI(location.pathname + location.search);
            fragment = this.root != '/' ? fragment.replace(this.root, '') : fragment;
        } else {
            match = window.location.href.match(this.regHash);
            fragment = match ? match[1] : '';
        }

        return fragment;
    },
    clearSlashes(path) {
        return path.toString().replace(regSlashBegin, '').replace(regSlashEnd, '');
    },
    createRouteMatcher(routeObj, parentRoute = ''){
        let {path} = routeObj;
        const hasWildCard = regWildCard.test(path);

        path = this.clearSlashes(path);

        if(hasWildCard){
            path = path.replace(regWildCard, '');
        }

        if(parentRoute && !parentRoute.endsWith('/') && path){
            parentRoute += '/';
        }

        path = parentRoute + path;
        routeObj.path = path;

        routeObj.matcher = path ?
            path.split('/').map((name)=>{
                const isPlaceHolder = name[0] == ':';

                return {
                    type: isPlaceHolder ? 'placeholder' : 'strict',
                    name: isPlaceHolder ? name.slice(1) : name,
                };
            }) :
            []
        ;

        if(hasWildCard){
            routeObj.matcher.push({
                type: 'wildcard',
            });
        } else if(!routeObj.matcher.length){
            routeObj.matcher.push({
                type: 'strict',
                name: '',
            });
        }
    },
    extendRoutes(routes, parentPath){
        let path;

        for(path in routes){
            let routeObj = routes[path];

            if(typeof routeObj == 'function'){
                routeObj = {
                    handler: routeObj,
                };
                routes[path] = routeObj;
            }

            if(routeObj.subRoutes){
                if(!routeObj.handler){
                    routeObj.handler = returnTrue;
                }

                if(!path.endsWith('*')){
                    path += '*';
                }
            }

            routeObj.path = path;

            this.createRouteMatcher(routeObj, parentPath);

            if(routeObj.subRoutes){
                this.extendRoutes(routeObj.subRoutes, routeObj.path);
            }
        }
    },
    /**
     *
     * @param routes
     *
     * @example
     * Router.map({
     *     '/'(){
     *
     *     },
     *     '/:lang': {
     *         handler({lang}){
     *             return (lang in availableLangs);
     *         },
     *         subRoutes: {
     *             '/'(){
     *
     *             },
     *             '/user'
     *         }
     *     },
     *     '*'(){
     *
     *     }
     * });
     */
    map(routes){
        this.extendRoutes(routes);
        this.routes = routes;
    },
    flush() {
        this.routes = {};
        this.mode = null;
        this.root = '/';
        return this;
    },
    matches(route, path){
        const length = path.length + 1;

        if(route.length > length || ((length - 2) > route.length && route[route.length - 1].type != 'wildcard')){
            return null;
        }

        let params = {};

        for(let i = 0; i < length; i++){
            let routePart = route[i];
            let pathPart = path[i];

            if(!routePart){
                if(pathPart){
                    params = null;
                }
            } else if(routePart.type == 'wildcard'){
                if(pathPart){
                    params['*'] = decodeParam(path.slice(i).join('/'));
                }
                break;
            } else if(routePart.type == 'placeholder'){
                if(pathPart){
                    params[routePart.name] = decodeParam(pathPart);
                } else {
                    params = null;
                }
            } else if(routePart.name != pathPart) {
                params = null;
            }

            if(!params){
                break;
            }
        }

        return params;
    },
    findMatchingRoutes(routes, fragment, data, options){
        for(let route in routes){
            route = routes[route];

            let handleResult;
            let params = this.matches(route.matcher, fragment);

            if (params) {
                handleResult = route.handler(params, options, data);

                if (handleResult == null) {
                    return null;
                } else if(handleResult === true){
                    if(route.subRoutes){
                        handleResult = this.findMatchingRoutes(route.subRoutes, fragment, data, options);

                        if(handleResult !== false){
                            return null;
                        }
                    }
                }
            }
        }

        return false;
    },
    _saveState(fragment, event = {type: 'unknown/initial'}){
        const data = {fragment: fragment == null ? this.getFragment() : fragment};
        const fragmentParts = data.fragment.split('?');

        fragment = this.clearSlashes((fragmentParts[0] || '').replace(this.regIndex, ''));

        this.before = this.current;
        this.beforeRoute = this.currentRoute;
        this.beforeOptions = this.currentOptions || '';

        this.current = data.fragment;
        this.currentRoute = fragment;
        this.currentOptions = fragmentParts[1] || '';

        data.fragment = fragment;
        data.changedRoute = this.beforeRoute != this.currentRoute;
        data.changedOptions = this.beforeOptions != this.currentOptions;
        data.history = this.history;
        data.activeHistoryIndex = this.activeHistoryIndex;
        data.event = event;

        return data;
    },
    applyRoutes(fragment, event) {
        const data = this._saveState(fragment, event);
        const options = deserialize(this.currentOptions);

        fragment = data.fragment.split('/');

        if(this.noNavigate){
            this.logError('Router.applyRoutes called while routes are already applied.');
        }

        this.noNavigate = true;
        this.findMatchingRoutes(this.routes, fragment, data, options);
        this.noNavigate = false;

        return this;
    },
    unlisten() {
        if (this._listener) {
            window.removeEventListener('hashchange', this._listener);
            window.removeEventListener('popstate', this._listener);
        }

        if (this._listener || this.interval) {
            clearInterval(this.interval);
        }

        return this;
    },
    applyRoutesIfNeeded(event){
        if(this.getFragment() !== this.current){
            this.onRouteChanged(event);
        }
    },
    onRouteChanged(event){
        const cur = this.getFragment();
        const stop = cur === this.current && regReloadStop.test(this.samePathStrategy);

        if(!stop){
            this.updateActiveHistoryIndex();
            this.applyRoutes(cur, event);
        } else if(event && event.original && event.original.type === 'popstate') {
            this.logWarn('route did not change, but pop event occurred');
            this.updateActiveHistoryIndex();
        }

        return this;
    },
    initHistory: function(){
        const state = winHistory.state;
        let currentHistoryKey = state && state.historyKey;
        let restoredRouterState;

        this.history = null;

        try {
            restoredRouterState = JSON.parse(window.sessionStorage.getItem(this.storageKey));
        } catch(e) {} // eslint-disable-line no-empty

        if(restoredRouterState){
            this.sessionHistories = restoredRouterState.sessionHistories;

            if(currentHistoryKey && this.sessionHistories.length){
                this.history = this.sessionHistories.find((history) => {
                    const historyIndex = history.indexOf(currentHistoryKey);

                    if(historyIndex > -1){
                        this.activeHistoryIndex = historyIndex;
                        return true;
                    }
                });
            }
        }

        if(!currentHistoryKey){
            currentHistoryKey = this.getHistoryKey();

            winHistory.replaceState({
                state,
                historyKey: currentHistoryKey,
            }, '');
        }

        this.sessionHistories = this.sessionHistories || [];

        if(!this.history){
            this.history = [currentHistoryKey];
            this.activeHistoryIndex = 0;
            this.sessionHistories.push(this.history);
        }
    },
    updateActiveHistoryIndex(){
        const currentHistoryKey = winHistory.state && winHistory.state.historyKey;

        if(!currentHistoryKey){
            return this.logWarn('missing currentHistoryKey');
        }

        this.activeHistoryIndex = this.history.indexOf(currentHistoryKey);

        if(this.activeHistoryIndex === -1){
            this.logWarn('did not find key in history', currentHistoryKey, this.history, this.sessionHistories);
            this.history = [currentHistoryKey];
            this.activeHistoryIndex = 0;
            this.sessionHistories.push(this.history);
        }

        this.saveRouterState();
    },
    getHistoryKey(){
        historyKeyCounter += 1;
        return historyKeyCounter + '-' + getID();
    },
    addToHistory(historyKey, replace){
        if(replace){
            this.history[this.activeHistoryIndex] = historyKey;
        } else {
            // remove former history future stack
            const historyEndIndex = this.history.length - 1;

            if(historyEndIndex > this.activeHistoryIndex){
                this.history.length = this.activeHistoryIndex + 1;
            }

            this.history.push(historyKey);
            this.activeHistoryIndex = this.history.length - 1;
        }

        this.saveRouterState();
    },
    saveRouterState(){
        window.sessionStorage.setItem(this.storageKey, JSON.stringify({sessionHistories: this.sessionHistories}));
    },
    listen() {
        this.current = this.getFragment();

        this.unlisten();

        if (!this._listener) {
            //'interval' often means either browser bug or external (disapproved) pushState/replaceState call
            this._listener = (e = {type: 'interval'}) => {
                const run = e.type != 'interval' || this.getFragment() !== this.current;

                if(run){
                    this.onRouteChanged({
                        type: 'popstate',
                        original: {
                            type: e.type,
                            state: e.state,
                        },
                    });
                }
            };
        }

        this.interval = setInterval(this._listener, 999);

        if (this.mode == 'hash') {
            window.addEventListener('hashchange', this._listener);
        } else {
            window.addEventListener('popstate', this._listener);
        }

        return this;
    },
    normalizePath(path){
        return this.root + this.clearSlashes(path);
    },
    navigate(path, state = null, silent, replace) {
        if(this.noNavigate){
            thenable.then(() => {
                this.navigate(...arguments);
            });
            this.logWarn('Router.navigate called while routes are already applied.');
            return this;
        }

        path = path || '';

        const changedPath = this.clearSlashes(path) !== this.clearSlashes(this.current);

        if(typeof state == 'boolean'){
            replace = silent;
            silent = state;
            state = null;
        }

        if(!changedPath){
            const { samePathStrategy } = this;

            if(samePathStrategy.includes('reload')){
                window.location.reload();
                return;
            } else if(samePathStrategy.includes('replace') && replace !== false){
                replace = true;
            }
        }

        const event = {
            type: 'navigate',
            replace,
        };

        if(!state || !state.historyKey || !state.state){
            state = {state, historyKey: this.getHistoryKey()};
        }

        this.addToHistory(state.historyKey, replace);

        if (this.mode === 'history') {
            winHistory[replace === true ? 'replaceState' : 'pushState'](state, '', this.root + this.clearSlashes(path));
        } else {
            const value = window.location.href.replace(regFullHash, '') + '#' + path;

            if(replace === true){
                location.replace(value);
            } else {
                window.location.href = value;
            }
        }

        if(silent){
            this._saveState(event);
        } else {
            this.onRouteChanged(event);
        }

        return this;
    },
    push(path, state, silent){
        return this.navigate(path, state, silent, false);
    },
    replace(path, state, silent) {
        return this.navigate(path, state, silent, true);
    },
}, 2);

export default rb.Router;
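For orientation, a minimal usage sketch of the API defined above. This is not part of the commit: the import path, the route paths and handlers, and the availableLangs lookup are illustrative assumptions, not code from the module.

import Router from './utils/router'; // path assumed from the file name below

const availableLangs = {en: true, de: true}; // hypothetical lookup

Router.map({
    // exact match for the root path
    '/'(params, options, data){
        // render home view
    },
    // placeholder route with sub-routes; returning true lets sub-routes match
    '/:lang': {
        handler({lang}){
            return (lang in availableLangs);
        },
        subRoutes: {
            '/user/:id'({lang, id}, options, data){
                // render user view for `id`
            },
        },
    },
    // wildcard fallback
    '*'(){
        // render 404 view
    },
});

Router.init({
    options: {mode: 'history'},
    listen: true, // start watching popstate/hashchange
});

Router.applyRoutes(); // run the handlers once for the current URL

// programmatic navigation: push a new history entry...
Router.push('/en/user/42');
// ...or replace the current one
Router.replace('/en/user/43');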
utils/router.js
set empty current
utils/router.js
set empty current
<ide><path>utils/router.js
<ide> 	routes: {},
<ide> 	mode: 'history',
<ide> 	root: '/',
<add> 	current: '',
<ide> 	regHash: /#!(.*)$/,
<ide> 	regIndex: /\/index\.htm[l]*$/,
<ide> 	//reload reloads the page on Router.navigate, replace uses replaceState on Router.navigate and recalls the handler and recall simply re-calls the router handler
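The commit itself is only the `current: ''` default added above. A plausible reading, based on the module source: before this change, `this.current` stayed undefined until `listen()` or `_saveState()` assigned it, so an early `navigate()` would call `this.clearSlashes(this.current)` on undefined (which throws), and the fragment guards compared a string against undefined. With the empty-string default those paths are well-defined from the start, for example:

// with `current: ''`, this is a string-vs-string comparison even before listen() has run
applyRoutesIfNeeded(event){
    if(this.getFragment() !== this.current){
        this.onRouteChanged(event);
    }
},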